diff --git a/.claude/settings.json b/.claude/settings.json new file mode 100644 index 0000000..9f474d7 --- /dev/null +++ b/.claude/settings.json @@ -0,0 +1,5 @@ +{ + "enabledPlugins": { + + } +} diff --git a/.coderabbit.yaml b/.coderabbit.yaml deleted file mode 100644 index 861b2e2..0000000 --- a/.coderabbit.yaml +++ /dev/null @@ -1,13 +0,0 @@ -# CodeRabbit Configuration -# Inherits from: https://github.com/host-uk/coderabbit/.coderabbit.yaml - -reviews: - review_status: false - - path_instructions: - - path: "cmd/**" - instructions: "CLI command code - check for proper cobra usage and flag handling" - - path: "pkg/**" - instructions: "Library code - ensure good API design and documentation" - - path: "internal/**" - instructions: "Internal packages - check for proper encapsulation" diff --git a/.core/build.yaml b/.core/build.yaml new file mode 100644 index 0000000..dbec905 --- /dev/null +++ b/.core/build.yaml @@ -0,0 +1,28 @@ +# Core Go Framework build configuration +# Used by: core build +# Note: This is a library module (no binary). Build validates compilation only. 
+ +version: 1 + +project: + name: core-go + description: Core Go Framework — dependency injection and lifecycle management + binary: "" + +build: + cgo: false + flags: + - -trimpath + ldflags: + - -s + - -w + +targets: + - os: linux + arch: amd64 + - os: linux + arch: arm64 + - os: darwin + arch: arm64 + - os: windows + arch: amd64 diff --git a/.core/linuxkit/core-dev.yml b/.core/linuxkit/core-dev.yml deleted file mode 100644 index 712e43e..0000000 --- a/.core/linuxkit/core-dev.yml +++ /dev/null @@ -1,121 +0,0 @@ -# Core Development Environment Template -# A full-featured development environment with multiple runtimes -# -# Variables: -# ${SSH_KEY} - SSH public key for access (required) -# ${MEMORY:-2048} - Memory in MB (default: 2048) -# ${CPUS:-2} - Number of CPUs (default: 2) -# ${HOSTNAME:-core-dev} - Hostname for the VM -# ${DATA_SIZE:-10G} - Size of persistent /data volume - -kernel: - image: linuxkit/kernel:6.6.13 - cmdline: "console=tty0 console=ttyS0" - -init: - - linuxkit/init:v1.2.0 - - linuxkit/runc:v1.1.12 - - linuxkit/containerd:v1.7.13 - - linuxkit/ca-certificates:v1.0.0 - -onboot: - - name: sysctl - image: linuxkit/sysctl:v1.0.0 - - name: format - image: linuxkit/format:v1.0.0 - - name: mount - image: linuxkit/mount:v1.0.0 - command: ["/usr/bin/mountie", "/dev/sda1", "/data"] - - name: dhcpcd - image: linuxkit/dhcpcd:v1.0.0 - command: ["/sbin/dhcpcd", "--nobackground", "-f", "/dhcpcd.conf", "-1"] - -onshutdown: - - name: shutdown - image: busybox:latest - command: ["/bin/echo", "Shutting down..."] - -services: - - name: getty - image: linuxkit/getty:v1.0.0 - env: - - INSECURE=true - - - name: sshd - image: linuxkit/sshd:v1.2.0 - binds: - - /etc/ssh/authorized_keys:/root/.ssh/authorized_keys - - - name: docker - image: docker:24.0-dind - capabilities: - - all - net: host - pid: host - binds: - - /var/run:/var/run - - /data/docker:/var/lib/docker - rootfsPropagation: shared - - - name: dev-tools - image: alpine:3.19 - capabilities: - - all - net: host 
- binds: - - /data:/data - command: - - /bin/sh - - -c - - | - # Install development tools - apk add --no-cache \ - git curl wget vim nano htop tmux \ - build-base gcc musl-dev linux-headers \ - openssh-client jq yq - - # Install Go 1.22.0 - wget -q https://go.dev/dl/go1.22.0.linux-amd64.tar.gz - tar -C /usr/local -xzf go1.22.0.linux-amd64.tar.gz - rm go1.22.0.linux-amd64.tar.gz - echo 'export PATH=/usr/local/go/bin:$PATH' >> /etc/profile - - # Install Node.js - apk add --no-cache nodejs npm - - # Install PHP - apk add --no-cache php82 php82-cli php82-curl php82-json php82-mbstring \ - php82-openssl php82-pdo php82-pdo_mysql php82-pdo_pgsql php82-phar \ - php82-session php82-tokenizer php82-xml php82-zip composer - - # Keep container running - tail -f /dev/null - -files: - - path: /etc/hostname - contents: "${HOSTNAME:-core-dev}" - - path: /etc/ssh/authorized_keys - contents: "${SSH_KEY}" - mode: "0600" - - path: /etc/profile.d/dev.sh - contents: | - export PATH=$PATH:/usr/local/go/bin - export GOPATH=/data/go - export PATH=$PATH:$GOPATH/bin - cd /data - mode: "0755" - - path: /etc/motd - contents: | - ================================================ - Core Development Environment - - Runtimes: Go, Node.js, PHP - Tools: git, curl, vim, docker - - Data directory: /data (persistent) - ================================================ - -trust: - org: - - linuxkit - - library diff --git a/.core/linuxkit/server-php.yml b/.core/linuxkit/server-php.yml deleted file mode 100644 index 9db9f74..0000000 --- a/.core/linuxkit/server-php.yml +++ /dev/null @@ -1,142 +0,0 @@ -# PHP/FrankenPHP Server Template -# A minimal production-ready PHP server with FrankenPHP and Caddy -# -# Variables: -# ${SSH_KEY} - SSH public key for management access (required) -# ${MEMORY:-512} - Memory in MB (default: 512) -# ${CPUS:-1} - Number of CPUs (default: 1) -# ${HOSTNAME:-php-server} - Hostname for the VM -# ${APP_NAME:-app} - Application name -# ${DOMAIN:-localhost} - Domain for SSL 
certificates -# ${PHP_MEMORY:-128M} - PHP memory limit - -kernel: - image: linuxkit/kernel:6.6.13 - cmdline: "console=tty0 console=ttyS0" - -init: - - linuxkit/init:v1.2.0 - - linuxkit/runc:v1.1.12 - - linuxkit/containerd:v1.7.13 - - linuxkit/ca-certificates:v1.0.0 - -onboot: - - name: sysctl - image: linuxkit/sysctl:v1.0.0 - - name: dhcpcd - image: linuxkit/dhcpcd:v1.0.0 - command: ["/sbin/dhcpcd", "--nobackground", "-f", "/dhcpcd.conf", "-1"] - -services: - - name: sshd - image: linuxkit/sshd:v1.2.0 - binds: - - /etc/ssh/authorized_keys:/root/.ssh/authorized_keys - - - name: frankenphp - image: dunglas/frankenphp:latest - capabilities: - - CAP_NET_BIND_SERVICE - net: host - binds: - - /app:/app - - /data:/data - - /etc/caddy/Caddyfile:/etc/caddy/Caddyfile - env: - - SERVER_NAME=${DOMAIN:-localhost} - - FRANKENPHP_CONFIG=/etc/caddy/Caddyfile - command: - - frankenphp - - run - - --config - - /etc/caddy/Caddyfile - - - name: healthcheck - image: alpine:3.19 - net: host - command: - - /bin/sh - - -c - - | - apk add --no-cache curl - while true; do - sleep 30 - curl -sf http://localhost/health || echo "Health check failed" - done - -files: - - path: /etc/hostname - contents: "${HOSTNAME:-php-server}" - - path: /etc/ssh/authorized_keys - contents: "${SSH_KEY}" - mode: "0600" - - path: /etc/caddy/Caddyfile - contents: | - { - frankenphp - order php_server before file_server - } - - ${DOMAIN:-localhost} { - root * /app/public - - # Health check endpoint - handle /health { - respond "OK" 200 - } - - # PHP handling - php_server - - # Encode responses - encode zstd gzip - - # Security headers - header { - X-Content-Type-Options nosniff - X-Frame-Options DENY - X-XSS-Protection "1; mode=block" - Referrer-Policy strict-origin-when-cross-origin - } - - # Logging - log { - output file /data/logs/access.log - format json - } - } - mode: "0644" - - path: /app/public/index.php - contents: | - 'healthy', - 'app' => '${APP_NAME:-app}', - 'timestamp' => date('c'), - 'php_version' => 
PHP_VERSION, - ]); - mode: "0644" - - path: /etc/php/php.ini - contents: | - memory_limit = ${PHP_MEMORY:-128M} - max_execution_time = 30 - upload_max_filesize = 64M - post_max_size = 64M - display_errors = Off - log_errors = On - error_log = /data/logs/php_errors.log - mode: "0644" - - path: /data/logs/.gitkeep - contents: "" - -trust: - org: - - linuxkit - - library - - dunglas diff --git a/.core/plugin/commands/remember.md b/.core/plugin/commands/remember.md deleted file mode 100644 index 41b8eff..0000000 --- a/.core/plugin/commands/remember.md +++ /dev/null @@ -1,36 +0,0 @@ ---- -name: remember -description: Save a fact or decision to context for persistence across compacts -args: ---- - -# Remember Context - -Save the provided fact to `~/.claude/sessions/context.json`. - -## Usage - -``` -/core:remember Use Action pattern not Service -/core:remember User prefers UK English -/core:remember RFC: minimal state in pre-compact hook -``` - -## Action - -Run this command to save the fact: - -```bash -~/.claude/plugins/cache/core/scripts/capture-context.sh "" "user" -``` - -Or if running from the plugin directory: - -```bash -"${CLAUDE_PLUGIN_ROOT}/scripts/capture-context.sh" "" "user" -``` - -The fact will be: -- Stored in context.json (max 20 items) -- Included in pre-compact snapshots -- Auto-cleared after 3 hours of inactivity diff --git a/.core/plugin/hooks/prefer-core.sh b/.core/plugin/hooks/prefer-core.sh deleted file mode 100755 index 52ce773..0000000 --- a/.core/plugin/hooks/prefer-core.sh +++ /dev/null @@ -1,102 +0,0 @@ -#!/bin/bash -# PreToolUse hook: Block dangerous commands, enforce core CLI -# -# BLOCKS: -# - Raw go commands (use core go *) -# - Destructive grep patterns (sed -i, xargs rm, etc.) 
-# - Mass file operations (rm -rf, mv/cp with wildcards) -# - Any sed outside of safe patterns -# -# This prevents "efficient shortcuts" that nuke codebases - -read -r input -command=$(echo "$input" | jq -r '.tool_input.command // empty') - -# === HARD BLOCKS - Never allow these === - -# Block rm -rf, rm -r (except for known safe paths like node_modules, vendor, .cache) -if echo "$command" | grep -qE 'rm\s+(-[a-zA-Z]*r[a-zA-Z]*|-[a-zA-Z]*f[a-zA-Z]*r|--recursive)'; then - # Allow only specific safe directories - if ! echo "$command" | grep -qE 'rm\s+(-rf|-r)\s+(node_modules|vendor|\.cache|dist|build|__pycache__|\.pytest_cache|/tmp/)'; then - echo '{"decision": "block", "message": "BLOCKED: Recursive delete is not allowed. Delete files individually or ask the user to run this command."}' - exit 0 - fi -fi - -# Block mv/cp with wildcards (mass file moves) -if echo "$command" | grep -qE '(mv|cp)\s+.*\*'; then - echo '{"decision": "block", "message": "BLOCKED: Mass file move/copy with wildcards is not allowed. Move files individually."}' - exit 0 -fi - -# Block xargs with rm, mv, cp (mass operations) -if echo "$command" | grep -qE 'xargs\s+.*(rm|mv|cp)'; then - echo '{"decision": "block", "message": "BLOCKED: xargs with file operations is not allowed. Too risky for mass changes."}' - exit 0 -fi - -# Block find -exec with rm, mv, cp -if echo "$command" | grep -qE 'find\s+.*-exec\s+.*(rm|mv|cp)'; then - echo '{"decision": "block", "message": "BLOCKED: find -exec with file operations is not allowed. Too risky for mass changes."}' - exit 0 -fi - -# Block ALL sed -i (in-place editing) -if echo "$command" | grep -qE 'sed\s+(-[a-zA-Z]*i|--in-place)'; then - echo '{"decision": "block", "message": "BLOCKED: sed -i (in-place edit) is never allowed. 
Use the Edit tool for file changes."}' - exit 0 -fi - -# Block sed piped to file operations -if echo "$command" | grep -qE 'sed.*\|.*tee|sed.*>'; then - echo '{"decision": "block", "message": "BLOCKED: sed with file output is not allowed. Use the Edit tool for file changes."}' - exit 0 -fi - -# Block grep with -l piped to xargs/rm/sed (the classic codebase nuke pattern) -if echo "$command" | grep -qE 'grep\s+.*-l.*\|'; then - echo '{"decision": "block", "message": "BLOCKED: grep -l piped to other commands is the classic codebase nuke pattern. Not allowed."}' - exit 0 -fi - -# Block perl -i, awk with file redirection (sed alternatives) -if echo "$command" | grep -qE 'perl\s+-[a-zA-Z]*i|awk.*>'; then - echo '{"decision": "block", "message": "BLOCKED: In-place file editing with perl/awk is not allowed. Use the Edit tool."}' - exit 0 -fi - -# === REQUIRE CORE CLI === - -# Block raw go commands -case "$command" in - "go test"*|"go build"*|"go fmt"*|"go mod tidy"*|"go vet"*|"go run"*) - echo '{"decision": "block", "message": "Use `core go test`, `core build`, `core go fmt --fix`, etc. Raw go commands are not allowed."}' - exit 0 - ;; - "go "*) - # Other go commands - warn but allow - echo '{"decision": "block", "message": "Prefer `core go *` commands. If core does not have this command, ask the user."}' - exit 0 - ;; -esac - -# Block raw php commands -case "$command" in - "php artisan serve"*|"./vendor/bin/pest"*|"./vendor/bin/pint"*|"./vendor/bin/phpstan"*) - echo '{"decision": "block", "message": "Use `core php dev`, `core php test`, `core php fmt`, `core php analyse`. Raw php commands are not allowed."}' - exit 0 - ;; - "composer test"*|"composer lint"*) - echo '{"decision": "block", "message": "Use `core php test` or `core php fmt`. 
Raw composer commands are not allowed."}' - exit 0 - ;; -esac - -# Block golangci-lint directly -if echo "$command" | grep -qE '^golangci-lint'; then - echo '{"decision": "block", "message": "Use `core go lint` instead of golangci-lint directly."}' - exit 0 -fi - -# === APPROVED === -echo '{"decision": "approve"}' diff --git a/.core/plugin/plugin.json b/.core/plugin/plugin.json deleted file mode 100644 index 2f79b85..0000000 --- a/.core/plugin/plugin.json +++ /dev/null @@ -1,102 +0,0 @@ -{ - "name": "core", - "version": "1.0.0", - "description": "Host UK unified framework - Go CLI, PHP framework, multi-repo management", - "dependencies": [ - "superpowers@claude-plugins-official" - ], - "skills": [ - { - "name": "core", - "path": "skills/core.md", - "description": "Use when working in host-uk repositories. Provides core CLI command reference." - }, - { - "name": "core-php", - "path": "skills/php.md", - "description": "Use when creating PHP modules, services, or actions in core-* packages." - }, - { - "name": "core-go", - "path": "skills/go.md", - "description": "Use when creating Go packages or extending the core CLI." 
- } - ], - "commands": [ - { - "name": "remember", - "path": "commands/remember.md", - "description": "Save a fact or decision to context" - } - ], - "hooks": { - "SessionStart": [ - { - "matcher": "*", - "script": "scripts/session-start.sh", - "description": "Check for recent session state on startup" - } - ], - "PreCompact": [ - { - "matcher": "*", - "script": "scripts/pre-compact.sh", - "description": "Save state before auto-compact to prevent amnesia" - } - ], - "PreToolUse": [ - { - "matcher": "Bash", - "script": "hooks/prefer-core.sh", - "description": "Suggest core CLI instead of raw go/php commands" - }, - { - "matcher": "Write", - "script": "scripts/block-docs.sh", - "description": "Block random .md files, keep docs consolidated" - }, - { - "matcher": "Edit", - "script": "scripts/suggest-compact.sh", - "description": "Suggest /compact at logical intervals" - }, - { - "matcher": "Write", - "script": "scripts/suggest-compact.sh", - "description": "Suggest /compact at logical intervals" - } - ], - "PostToolUse": [ - { - "matcher": "Edit", - "script": "scripts/php-format.sh", - "description": "Auto-format PHP files after edits" - }, - { - "matcher": "Edit", - "script": "scripts/go-format.sh", - "description": "Auto-format Go files after edits" - }, - { - "matcher": "Edit", - "script": "scripts/check-debug.sh", - "description": "Warn about debug statements (dd, dump, fmt.Println)" - }, - { - "matcher": "Bash", - "script": "scripts/pr-created.sh", - "description": "Log PR URL after creation" - }, - { - "matcher": "Bash", - "script": "scripts/extract-actionables.sh", - "description": "Extract actionables from core CLI output" - }, - { - "matcher": "Bash", - "script": "scripts/post-commit-check.sh", - "description": "Warn about uncommitted work after git commit" - } - ] - } -} diff --git a/.core/plugin/scripts/block-docs.sh b/.core/plugin/scripts/block-docs.sh deleted file mode 100755 index dfac1da..0000000 --- a/.core/plugin/scripts/block-docs.sh +++ /dev/null @@ 
-1,27 +0,0 @@ -#!/bin/bash -# Block creation of random .md files - keeps docs consolidated - -read -r input -FILE_PATH=$(echo "$input" | jq -r '.tool_input.file_path // empty') - -if [[ -n "$FILE_PATH" ]]; then - # Allow known documentation files - case "$FILE_PATH" in - *README.md|*CLAUDE.md|*AGENTS.md|*CONTRIBUTING.md|*CHANGELOG.md|*LICENSE.md) - echo "$input" - exit 0 - ;; - # Allow docs/ directory - */docs/*.md|*/docs/**/*.md) - echo "$input" - exit 0 - ;; - # Block other .md files - *.md) - echo '{"decision": "block", "message": "Use README.md or docs/ for documentation. Random .md files clutter the repo."}' - exit 0 - ;; - esac -fi - -echo "$input" diff --git a/.core/plugin/scripts/capture-context.sh b/.core/plugin/scripts/capture-context.sh deleted file mode 100755 index 288e9be..0000000 --- a/.core/plugin/scripts/capture-context.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash -# Capture context facts from tool output or conversation -# Called by PostToolUse hooks to extract actionable items -# -# Stores in ~/.claude/sessions/context.json as: -# [{"fact": "...", "source": "core go qa", "ts": 1234567890}, ...] - -CONTEXT_FILE="${HOME}/.claude/sessions/context.json" -TIMESTAMP=$(date '+%s') -THREE_HOURS=10800 - -mkdir -p "${HOME}/.claude/sessions" - -# Initialize if missing or stale -if [[ -f "$CONTEXT_FILE" ]]; then - FIRST_TS=$(jq -r '.[0].ts // 0' "$CONTEXT_FILE" 2>/dev/null) - NOW=$(date '+%s') - AGE=$((NOW - FIRST_TS)) - if [[ $AGE -gt $THREE_HOURS ]]; then - echo "[]" > "$CONTEXT_FILE" - fi -else - echo "[]" > "$CONTEXT_FILE" -fi - -# Read input (fact and source passed as args or stdin) -FACT="${1:-}" -SOURCE="${2:-manual}" - -if [[ -z "$FACT" ]]; then - # Try reading from stdin - read -r FACT -fi - -if [[ -n "$FACT" ]]; then - # Append to context (keep last 20 items) - jq --arg fact "$FACT" --arg source "$SOURCE" --argjson ts "$TIMESTAMP" \ - '. 
+ [{"fact": $fact, "source": $source, "ts": $ts}] | .[-20:]' \ - "$CONTEXT_FILE" > "${CONTEXT_FILE}.tmp" && mv "${CONTEXT_FILE}.tmp" "$CONTEXT_FILE" - - echo "[Context] Saved: $FACT" >&2 -fi - -exit 0 diff --git a/.core/plugin/scripts/check-debug.sh b/.core/plugin/scripts/check-debug.sh deleted file mode 100755 index 079cc0e..0000000 --- a/.core/plugin/scripts/check-debug.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -# Warn about debug statements left in code after edits - -read -r input -FILE_PATH=$(echo "$input" | jq -r '.tool_input.file_path // empty') - -if [[ -n "$FILE_PATH" && -f "$FILE_PATH" ]]; then - case "$FILE_PATH" in - *.go) - # Check for fmt.Println, log.Println debug statements - if grep -n "fmt\.Println\|log\.Println" "$FILE_PATH" 2>/dev/null | head -3 | grep -q .; then - echo "[Hook] WARNING: Debug prints found in $FILE_PATH" >&2 - grep -n "fmt\.Println\|log\.Println" "$FILE_PATH" 2>/dev/null | head -3 >&2 - fi - ;; - *.php) - # Check for dd(), dump(), var_dump(), print_r() - if grep -n "dd(\|dump(\|var_dump(\|print_r(" "$FILE_PATH" 2>/dev/null | head -3 | grep -q .; then - echo "[Hook] WARNING: Debug statements found in $FILE_PATH" >&2 - grep -n "dd(\|dump(\|var_dump(\|print_r(" "$FILE_PATH" 2>/dev/null | head -3 >&2 - fi - ;; - esac -fi - -# Pass through the input -echo "$input" diff --git a/.core/plugin/scripts/extract-actionables.sh b/.core/plugin/scripts/extract-actionables.sh deleted file mode 100755 index 86a2bbb..0000000 --- a/.core/plugin/scripts/extract-actionables.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash -# Extract actionable items from core CLI output -# Called PostToolUse on Bash commands that run core - -read -r input -COMMAND=$(echo "$input" | jq -r '.tool_input.command // empty') -OUTPUT=$(echo "$input" | jq -r '.tool_output.output // empty') - -CONTEXT_SCRIPT="$(dirname "$0")/capture-context.sh" - -# Extract actionables from specific core commands -case "$COMMAND" in - "core go qa"*|"core go test"*|"core go lint"*) - # 
Extract error/warning lines - echo "$OUTPUT" | grep -E "^(ERROR|WARN|FAIL|---)" | head -5 | while read -r line; do - "$CONTEXT_SCRIPT" "$line" "core go" - done - ;; - "core php test"*|"core php analyse"*) - # Extract PHP errors - echo "$OUTPUT" | grep -E "^(FAIL|Error|×)" | head -5 | while read -r line; do - "$CONTEXT_SCRIPT" "$line" "core php" - done - ;; - "core build"*) - # Extract build errors - echo "$OUTPUT" | grep -E "^(error|cannot|undefined)" | head -5 | while read -r line; do - "$CONTEXT_SCRIPT" "$line" "core build" - done - ;; -esac - -# Pass through -echo "$input" diff --git a/.core/plugin/scripts/go-format.sh b/.core/plugin/scripts/go-format.sh deleted file mode 100755 index 8f9d322..0000000 --- a/.core/plugin/scripts/go-format.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -# Auto-format Go files after edits using core go fmt - -read -r input -FILE_PATH=$(echo "$input" | jq -r '.tool_input.file_path // empty') - -if [[ -n "$FILE_PATH" && -f "$FILE_PATH" ]]; then - # Run gofmt/goimports on the file silently - if command -v core &> /dev/null; then - core go fmt --fix "$FILE_PATH" 2>/dev/null || true - elif command -v goimports &> /dev/null; then - goimports -w "$FILE_PATH" 2>/dev/null || true - elif command -v gofmt &> /dev/null; then - gofmt -w "$FILE_PATH" 2>/dev/null || true - fi -fi - -# Pass through the input -echo "$input" diff --git a/.core/plugin/scripts/php-format.sh b/.core/plugin/scripts/php-format.sh deleted file mode 100755 index e0e7ec1..0000000 --- a/.core/plugin/scripts/php-format.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Auto-format PHP files after edits using core php fmt - -read -r input -FILE_PATH=$(echo "$input" | jq -r '.tool_input.file_path // empty') - -if [[ -n "$FILE_PATH" && -f "$FILE_PATH" ]]; then - # Run Pint on the file silently - if command -v core &> /dev/null; then - core php fmt --fix "$FILE_PATH" 2>/dev/null || true - elif [[ -f "./vendor/bin/pint" ]]; then - ./vendor/bin/pint "$FILE_PATH" 2>/dev/null || true 
- fi -fi - -# Pass through the input -echo "$input" diff --git a/.core/plugin/scripts/post-commit-check.sh b/.core/plugin/scripts/post-commit-check.sh deleted file mode 100755 index 42418b6..0000000 --- a/.core/plugin/scripts/post-commit-check.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/bin/bash -# Post-commit hook: Check for uncommitted work that might get lost -# -# After committing task-specific files, check if there's other work -# in the repo that should be committed or stashed - -read -r input -COMMAND=$(echo "$input" | jq -r '.tool_input.command // empty') - -# Only run after git commit -if ! echo "$COMMAND" | grep -qE '^git commit'; then - echo "$input" - exit 0 -fi - -# Check for remaining uncommitted changes -UNSTAGED=$(git diff --name-only 2>/dev/null | wc -l | tr -d ' ') -STAGED=$(git diff --cached --name-only 2>/dev/null | wc -l | tr -d ' ') -UNTRACKED=$(git ls-files --others --exclude-standard 2>/dev/null | wc -l | tr -d ' ') - -TOTAL=$((UNSTAGED + STAGED + UNTRACKED)) - -if [[ $TOTAL -gt 0 ]]; then - echo "" >&2 - echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" >&2 - echo "[PostCommit] WARNING: Uncommitted work remains" >&2 - echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" >&2 - - if [[ $UNSTAGED -gt 0 ]]; then - echo " Modified (unstaged): $UNSTAGED files" >&2 - git diff --name-only 2>/dev/null | head -5 | sed 's/^/ /' >&2 - [[ $UNSTAGED -gt 5 ]] && echo " ... and $((UNSTAGED - 5)) more" >&2 - fi - - if [[ $STAGED -gt 0 ]]; then - echo " Staged (not committed): $STAGED files" >&2 - git diff --cached --name-only 2>/dev/null | head -5 | sed 's/^/ /' >&2 - fi - - if [[ $UNTRACKED -gt 0 ]]; then - echo " Untracked: $UNTRACKED files" >&2 - git ls-files --others --exclude-standard 2>/dev/null | head -5 | sed 's/^/ /' >&2 - [[ $UNTRACKED -gt 5 ]] && echo " ... 
and $((UNTRACKED - 5)) more" >&2 - fi - - echo "" >&2 - echo "Consider: commit these, stash them, or confirm they're intentionally left" >&2 - echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" >&2 -fi - -echo "$input" diff --git a/.core/plugin/scripts/pr-created.sh b/.core/plugin/scripts/pr-created.sh deleted file mode 100755 index 82dd975..0000000 --- a/.core/plugin/scripts/pr-created.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash -# Log PR URL and provide review command after PR creation - -read -r input -COMMAND=$(echo "$input" | jq -r '.tool_input.command // empty') -OUTPUT=$(echo "$input" | jq -r '.tool_output.output // empty') - -if [[ "$COMMAND" == *"gh pr create"* ]]; then - PR_URL=$(echo "$OUTPUT" | grep -oE 'https://github.com/[^/]+/[^/]+/pull/[0-9]+' | head -1) - if [[ -n "$PR_URL" ]]; then - REPO=$(echo "$PR_URL" | sed -E 's|https://github.com/([^/]+/[^/]+)/pull/[0-9]+|\1|') - PR_NUM=$(echo "$PR_URL" | sed -E 's|.*/pull/([0-9]+)|\1|') - echo "[Hook] PR created: $PR_URL" >&2 - echo "[Hook] To review: gh pr review $PR_NUM --repo $REPO" >&2 - fi -fi - -echo "$input" diff --git a/.core/plugin/scripts/pre-compact.sh b/.core/plugin/scripts/pre-compact.sh deleted file mode 100755 index bb9d841..0000000 --- a/.core/plugin/scripts/pre-compact.sh +++ /dev/null @@ -1,69 +0,0 @@ -#!/bin/bash -# Pre-compact: Save minimal state for Claude to resume after auto-compact -# -# Captures: -# - Working directory + branch -# - Git status (files touched) -# - Todo state (in_progress items) -# - Context facts (decisions, actionables) - -STATE_FILE="${HOME}/.claude/sessions/scratchpad.md" -CONTEXT_FILE="${HOME}/.claude/sessions/context.json" -TIMESTAMP=$(date '+%s') -CWD=$(pwd) - -mkdir -p "${HOME}/.claude/sessions" - -# Get todo state -TODOS="" -if [[ -f "${HOME}/.claude/todos/current.json" ]]; then - TODOS=$(cat "${HOME}/.claude/todos/current.json" 2>/dev/null | head -50) -fi - -# Get git status -GIT_STATUS="" -BRANCH="" -if git rev-parse --git-dir > /dev/null 2>&1; then - 
GIT_STATUS=$(git status --short 2>/dev/null | head -15) - BRANCH=$(git branch --show-current 2>/dev/null) -fi - -# Get context facts -CONTEXT="" -if [[ -f "$CONTEXT_FILE" ]]; then - CONTEXT=$(jq -r '.[] | "- [\(.source)] \(.fact)"' "$CONTEXT_FILE" 2>/dev/null | tail -10) -fi - -cat > "$STATE_FILE" << EOF ---- -timestamp: ${TIMESTAMP} -cwd: ${CWD} -branch: ${BRANCH:-none} ---- - -# Resume After Compact - -You were mid-task. Do NOT assume work is complete. - -## Project -\`${CWD}\` on \`${BRANCH:-no branch}\` - -## Files Changed -\`\`\` -${GIT_STATUS:-none} -\`\`\` - -## Todos (in_progress = NOT done) -\`\`\`json -${TODOS:-check /todos} -\`\`\` - -## Context (decisions & actionables) -${CONTEXT:-none captured} - -## Next -Continue the in_progress todo. -EOF - -echo "[PreCompact] Snapshot saved" >&2 -exit 0 diff --git a/.core/plugin/scripts/session-start.sh b/.core/plugin/scripts/session-start.sh deleted file mode 100755 index 3a44d97..0000000 --- a/.core/plugin/scripts/session-start.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash -# Session start: Read scratchpad if recent, otherwise start fresh -# 3 hour window - if older, you've moved on mentally - -STATE_FILE="${HOME}/.claude/sessions/scratchpad.md" -THREE_HOURS=10800 # seconds - -if [[ -f "$STATE_FILE" ]]; then - # Get timestamp from file - FILE_TS=$(grep -E '^timestamp:' "$STATE_FILE" 2>/dev/null | cut -d' ' -f2) - NOW=$(date '+%s') - - if [[ -n "$FILE_TS" ]]; then - AGE=$((NOW - FILE_TS)) - - if [[ $AGE -lt $THREE_HOURS ]]; then - # Recent - read it back - echo "[SessionStart] Found recent scratchpad ($(($AGE / 60)) min ago)" >&2 - echo "[SessionStart] Reading previous state..." 
>&2 - echo "" >&2 - cat "$STATE_FILE" >&2 - echo "" >&2 - else - # Stale - delete and start fresh - rm -f "$STATE_FILE" - echo "[SessionStart] Previous session >3h old - starting fresh" >&2 - fi - else - # No timestamp, delete it - rm -f "$STATE_FILE" - fi -fi - -exit 0 diff --git a/.core/plugin/scripts/suggest-compact.sh b/.core/plugin/scripts/suggest-compact.sh deleted file mode 100755 index e958c50..0000000 --- a/.core/plugin/scripts/suggest-compact.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -# Suggest /compact at logical intervals to manage context window -# Tracks tool calls per session, suggests compaction every 50 calls - -SESSION_ID="${CLAUDE_SESSION_ID:-$$}" -COUNTER_FILE="/tmp/claude-tool-count-${SESSION_ID}" -THRESHOLD="${COMPACT_THRESHOLD:-50}" - -# Read or initialize counter -if [[ -f "$COUNTER_FILE" ]]; then - COUNT=$(($(cat "$COUNTER_FILE") + 1)) -else - COUNT=1 -fi - -echo "$COUNT" > "$COUNTER_FILE" - -# Suggest compact at threshold -if [[ $COUNT -eq $THRESHOLD ]]; then - echo "[Compact] ${THRESHOLD} tool calls - consider /compact if transitioning phases" >&2 -fi - -# Suggest at intervals after threshold -if [[ $COUNT -gt $THRESHOLD ]] && [[ $((COUNT % 25)) -eq 0 ]]; then - echo "[Compact] ${COUNT} tool calls - good checkpoint for /compact" >&2 -fi - -exit 0 diff --git a/.core/plugin/skills/core.md b/.core/plugin/skills/core.md deleted file mode 100644 index 966d7e9..0000000 --- a/.core/plugin/skills/core.md +++ /dev/null @@ -1,60 +0,0 @@ ---- -name: core -description: Use when working in host-uk repositories, running tests, building, releasing, or managing multi-repo workflows. Provides the core CLI command reference. ---- - -# Core CLI - -The `core` command provides a unified interface for Go/PHP development and multi-repo management. - -**Rule:** Always prefer `core ` over raw commands. 
- -## Quick Reference - -| Task | Command | -|------|---------| -| Go tests | `core go test` | -| Go coverage | `core go cov` | -| Go format | `core go fmt --fix` | -| Go lint | `core go lint` | -| PHP dev server | `core php dev` | -| PHP tests | `core php test` | -| PHP format | `core php fmt --fix` | -| Build | `core build` | -| Preview release | `core ci` | -| Publish | `core ci --were-go-for-launch` | -| Multi-repo status | `core dev health` | -| Commit dirty repos | `core dev commit` | -| Push repos | `core dev push` | - -## Decision Tree - -``` -Go project? - tests: core go test - format: core go fmt --fix - build: core build - -PHP project? - dev: core php dev - tests: core php test - format: core php fmt --fix - deploy: core php deploy - -Multiple repos? - status: core dev health - commit: core dev commit - push: core dev push -``` - -## Common Mistakes - -| Wrong | Right | -|-------|-------| -| `go test ./...` | `core go test` | -| `go build` | `core build` | -| `php artisan serve` | `core php dev` | -| `./vendor/bin/pest` | `core php test` | -| `git status` per repo | `core dev health` | - -Run `core --help` or `core --help` for full options. diff --git a/.core/plugin/skills/go.md b/.core/plugin/skills/go.md deleted file mode 100644 index 22a2227..0000000 --- a/.core/plugin/skills/go.md +++ /dev/null @@ -1,107 +0,0 @@ ---- -name: core-go -description: Use when creating Go packages or extending the core CLI. ---- - -# Go Framework Patterns - -Core CLI uses `pkg/` for reusable packages. Use `core go` commands. - -## Package Structure - -``` -core/ -├── main.go # CLI entry point -├── pkg/ -│ ├── cli/ # CLI framework, output, errors -│ ├── {domain}/ # Domain package -│ │ ├── cmd_{name}.go # Cobra command definitions -│ │ ├── service.go # Business logic -│ │ └── *_test.go # Tests -│ └── ... -└── internal/ # Private packages -``` - -## Adding a CLI Command - -1. 
Create `pkg/{domain}/cmd_{name}.go`: - -```go -package domain - -import ( - "github.com/host-uk/core/pkg/cli" - "github.com/spf13/cobra" -) - -func NewNameCmd() *cobra.Command { - cmd := &cobra.Command{ - Use: "name", - Short: cli.T("domain.name.short"), - RunE: func(cmd *cobra.Command, args []string) error { - // Implementation - cli.Success("Done") - return nil - }, - } - return cmd -} -``` - -2. Register in parent command. - -## CLI Output Helpers - -```go -import "github.com/host-uk/core/pkg/cli" - -cli.Success("Operation completed") // Green check -cli.Warning("Something to note") // Yellow warning -cli.Error("Something failed") // Red error -cli.Info("Informational message") // Blue info -cli.Fatal(err) // Print error and exit 1 - -// Structured output -cli.Table(headers, rows) -cli.JSON(data) -``` - -## i18n Pattern - -```go -// Use cli.T() for translatable strings -cli.T("domain.action.success") -cli.T("domain.action.error", "details", value) - -// Define in pkg/i18n/locales/en.yaml: -domain: - action: - success: "Operation completed successfully" - error: "Failed: {{.details}}" -``` - -## Test Naming - -```go -func TestFeature_Good(t *testing.T) { /* happy path */ } -func TestFeature_Bad(t *testing.T) { /* expected errors */ } -func TestFeature_Ugly(t *testing.T) { /* panics, edge cases */ } -``` - -## Commands - -| Task | Command | -|------|---------| -| Run tests | `core go test` | -| Coverage | `core go cov` | -| Format | `core go fmt --fix` | -| Lint | `core go lint` | -| Build | `core build` | -| Install | `core go install` | - -## Rules - -- `CGO_ENABLED=0` for all builds -- UK English in user-facing strings -- All errors via `cli.E("context", "message", err)` -- Table-driven tests preferred diff --git a/.core/plugin/skills/php.md b/.core/plugin/skills/php.md deleted file mode 100644 index 2133a20..0000000 --- a/.core/plugin/skills/php.md +++ /dev/null @@ -1,120 +0,0 @@ ---- -name: core-php -description: Use when creating PHP modules, services, or 
actions in core-* packages. ---- - -# PHP Framework Patterns - -Host UK PHP modules follow strict conventions. Use `core php` commands. - -## Module Structure - -``` -core-{name}/ -├── src/ -│ ├── Core/ # Namespace: Core\{Name} -│ │ ├── Boot.php # Module bootstrap (listens to lifecycle events) -│ │ ├── Actions/ # Single-purpose business logic -│ │ └── Models/ # Eloquent models -│ └── Mod/ # Namespace: Core\Mod\{Name} (optional extensions) -├── resources/views/ # Blade templates -├── routes/ # Route definitions -├── database/migrations/ # Migrations -├── tests/ # Pest tests -└── composer.json -``` - -## Boot Class Pattern - -```php - 'onWebRoutes', - AdminPanelBooting::class => ['onAdmin', 10], // With priority - ]; - - public function onWebRoutes(WebRoutesRegistering $event): void - { - $event->router->middleware('web')->group(__DIR__ . '/../routes/web.php'); - } - - public function onAdmin(AdminPanelBooting $event): void - { - $event->panel->resources([...]); - } -} -``` - -## Action Pattern - -```php - $user->id, - ...$data, - ]); - } -} - -// Usage: CreateThing::run($user, $validated); -``` - -## Multi-Tenant Models - -```php - - e.event === 'labeled' && e.label?.name === 'agent:wip' - ); - - const implementer = wipEvent?.actor?.login || 'unknown'; - const verifier = context.payload.sender.login; - - console.log(`Implementer: ${implementer}`); - console.log(`Verifier: ${verifier}`); - - if (implementer === verifier) { - core.setFailed(`Self-verification not allowed. ${verifier} cannot verify their own work.`); - } - - return { implementer, verifier }; - - - name: Record verification - if: success() - uses: actions/github-script@v7 - with: - script: | - const label = context.payload.label.name; - const verifier = context.payload.sender.login; - const status = label === 'verified' ? 
'✅ Verified' : '❌ Failed'; - - await github.rest.issues.createComment({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.issue.number, - body: `## ${status}\n\nVerified by @${verifier}` - }); - - // Remove agent:review label - try { - await github.rest.issues.removeLabel({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.issue.number, - name: 'agent:review' - }); - } catch (e) { - console.log('agent:review label not present'); - } - - # If verification failed, reset for rework - handle-failure: - if: github.event.label.name == 'verify-failed' - runs-on: ubuntu-latest - needs: check-verification - steps: - - name: Reset for rework - uses: actions/github-script@v7 - with: - script: | - // Remove verify-failed after processing - await github.rest.issues.removeLabel({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.issue.number, - name: 'verify-failed' - }); - - // Add back to ready queue - await github.rest.issues.addLabels({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.issue.number, - labels: ['agent:ready'] - }); diff --git a/.github/workflows/auto-label.yml b/.github/workflows/auto-label.yml deleted file mode 100644 index 936c307..0000000 --- a/.github/workflows/auto-label.yml +++ /dev/null @@ -1,115 +0,0 @@ -name: Auto Label Issues - -on: - issues: - types: [opened, edited] - -permissions: - issues: write - -jobs: - label: - runs-on: ubuntu-latest - steps: - - name: Auto-label based on content - uses: actions/github-script@v7 - with: - script: | - const issue = context.payload.issue; - const title = issue.title.toLowerCase(); - const body = (issue.body || '').toLowerCase(); - const content = title + ' ' + body; - - const labelsToAdd = []; - - // Type labels based on title prefix - if (title.includes('[bug]')) { - labelsToAdd.push('bug'); - } else if (title.includes('[feature]') || title.includes('feat(') || title.includes('feat:')) { - 
labelsToAdd.push('enhancement'); - } else if (title.includes('[docs]') || title.includes('docs(') || title.includes('docs:')) { - labelsToAdd.push('documentation'); - } - - // Project labels based on content - if (content.includes('core dev') || content.includes('core work') || content.includes('core commit') || content.includes('core push')) { - labelsToAdd.push('project:core-cli'); - } - if (content.includes('core php') || content.includes('composer') || content.includes('pest') || content.includes('phpstan')) { - labelsToAdd.push('project:core-php'); - } - - // Language labels - if (content.includes('.go') || content.includes('golang') || content.includes('go mod')) { - labelsToAdd.push('go'); - } - if (content.includes('.php') || content.includes('laravel') || content.includes('composer')) { - // Skip - already handled by project:core-php - } - - // Priority detection - if (content.includes('critical') || content.includes('urgent') || content.includes('breaking')) { - labelsToAdd.push('priority:high'); - } - - // Agent labels - if (content.includes('agent') || content.includes('ai ') || content.includes('claude') || content.includes('agentic')) { - labelsToAdd.push('agentic'); - } - - // Complexity - from template dropdown or heuristics - if (body.includes('small - quick fix')) { - labelsToAdd.push('complexity:small'); - labelsToAdd.push('good first issue'); - } else if (body.includes('medium - multiple files')) { - labelsToAdd.push('complexity:medium'); - } else if (body.includes('large - significant')) { - labelsToAdd.push('complexity:large'); - } else if (!body.includes('unknown - not sure')) { - // Heuristic complexity detection - const checklistCount = (body.match(/- \[ \]/g) || []).length; - const codeBlocks = (body.match(/```/g) || []).length / 2; - const sections = (body.match(/^##/gm) || []).length; - const fileRefs = (body.match(/\.(go|php|js|ts|yml|yaml|json|md)\b/g) || []).length; - - const complexKeywords = ['refactor', 'rewrite', 'migration', 
'breaking change', 'across repos', 'architecture']; - const simpleKeywords = ['simple', 'quick fix', 'typo', 'minor', 'trivial']; - - const hasComplexKeyword = complexKeywords.some(k => content.includes(k)); - const hasSimpleKeyword = simpleKeywords.some(k => content.includes(k)); - - let score = checklistCount * 2 + codeBlocks + sections + fileRefs; - score += hasComplexKeyword ? 5 : 0; - score -= hasSimpleKeyword ? 3 : 0; - - if (hasSimpleKeyword || score <= 2) { - labelsToAdd.push('complexity:small'); - labelsToAdd.push('good first issue'); - } else if (score <= 6) { - labelsToAdd.push('complexity:medium'); - } else { - labelsToAdd.push('complexity:large'); - } - } - - // Apply labels if any detected - if (labelsToAdd.length > 0) { - // Filter to only existing labels - const existingLabels = await github.rest.issues.listLabelsForRepo({ - owner: context.repo.owner, - repo: context.repo.repo, - per_page: 100 - }); - const validLabels = existingLabels.data.map(l => l.name); - const filteredLabels = labelsToAdd.filter(l => validLabels.includes(l)); - - if (filteredLabels.length > 0) { - await github.rest.issues.addLabels({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: issue.number, - labels: filteredLabels - }); - console.log(`Added labels: ${filteredLabels.join(', ')}`); - } - } diff --git a/.github/workflows/auto-project.yml b/.github/workflows/auto-project.yml deleted file mode 100644 index 2eded32..0000000 --- a/.github/workflows/auto-project.yml +++ /dev/null @@ -1,30 +0,0 @@ -name: Auto-add to Project - -on: - issues: - types: [opened, labeled] - -jobs: - add-to-project: - runs-on: ubuntu-latest - steps: - - name: Add to Workstation (agentic label) - if: contains(github.event.issue.labels.*.name, 'agentic') - uses: actions/add-to-project@v1.0.2 - with: - project-url: https://github.com/orgs/host-uk/projects/2 - github-token: ${{ secrets.PROJECT_TOKEN }} - - - name: Add to Core.GO (lang:go label) - if: 
contains(github.event.issue.labels.*.name, 'lang:go') - uses: actions/add-to-project@v1.0.2 - with: - project-url: https://github.com/orgs/host-uk/projects/4 - github-token: ${{ secrets.PROJECT_TOKEN }} - - - name: Add to Core.Framework (scope:arch label) - if: contains(github.event.issue.labels.*.name, 'scope:arch') - uses: actions/add-to-project@v1.0.2 - with: - project-url: https://github.com/orgs/host-uk/projects/1 - github-token: ${{ secrets.PROJECT_TOKEN }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index df471c6..9f1adf4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,23 +2,25 @@ name: CI on: push: - branches: ["main"] - pull_request: - branches: ["main"] + branches: [main] jobs: - build: + test: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v4 - - name: Set up Go - uses: actions/setup-go@v4 - with: - go-version: 1.22 + - uses: actions/setup-go@v5 + with: + go-version-file: go.mod - - name: Install dependencies - run: go mod tidy + - name: Run tests with coverage + run: | + go test -coverprofile=coverage.out ./tests/... + sed -i 's|dappco.re/go/core/||g' coverage.out - - name: Run tests - run: go test ./... 
+ - name: Upload to Codecov + uses: codecov/codecov-action@v5 + with: + files: coverage.out + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml deleted file mode 100644 index 8a1025f..0000000 --- a/.github/workflows/codeql.yml +++ /dev/null @@ -1,36 +0,0 @@ -name: CodeQL - -on: - push: - branches: [dev, main] - pull_request: - branches: [dev, main] - schedule: - - cron: "0 6 * * 1" - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - permissions: - actions: read - contents: read - security-events: write - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Initialize CodeQL - uses: github/codeql-action/init@v3 - with: - languages: go - - - name: Autobuild - uses: github/codeql-action/autobuild@v3 - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 - with: - category: "/language:go" - diff --git a/.github/workflows/codescan.yml b/.github/workflows/codescan.yml deleted file mode 100644 index 0cd58df..0000000 --- a/.github/workflows/codescan.yml +++ /dev/null @@ -1,36 +0,0 @@ -name: "Code Scanning" - -on: - push: - branches: ["dev"] - pull_request: - branches: ["dev"] - schedule: - - cron: "0 2 * * 1-5" - -jobs: - CodeQL: - runs-on: ubuntu-latest - - permissions: - # required for all workflows - security-events: write - - # only required for workflows in private repositories - actions: read - contents: read - - steps: - - name: "Checkout Repository" - uses: actions/checkout@v4 - - - name: "Initialize CodeQL" - uses: github/codeql-action/init@v3 - with: - languages: go,javascript,typescript - - - name: "Autobuild" - uses: github/codeql-action/autobuild@v3 - - - name: "Perform CodeQL Analysis" - uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml deleted file mode 100644 index 2a95ec5..0000000 --- a/.github/workflows/coverage.yml +++ /dev/null @@ -1,46 +0,0 @@ -name: Go Test Coverage - -on: - push: - 
branches: [dev, main] - pull_request: - branches: [dev, main] - -jobs: - coverage: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Set up Go - uses: actions/setup-go@v6 - with: - go-version-file: 'go.mod' - - - name: Setup Task - uses: arduino/setup-task@v1 - - - name: Install dependencies - run: | - sudo apt-get update - sudo apt-get install -y libgtk-3-dev libwebkit2gtk-4.1-dev - - - name: Build CLI - run: | - go generate ./pkg/updater/... - task cli:build - echo "$(pwd)/bin" >> $GITHUB_PATH - - - name: Run coverage - run: task cov - - - name: Upload coverage reports to Codecov - uses: codecov/codecov-action@v5 - with: - token: ${{ secrets.CODECOV_TOKEN }} - - - name: Upload coverage report - uses: actions/upload-artifact@v4 - with: - name: coverage-report - path: coverage.txt diff --git a/.github/workflows/dev-release.yml b/.github/workflows/dev-release.yml deleted file mode 100644 index a718f45..0000000 --- a/.github/workflows/dev-release.yml +++ /dev/null @@ -1,94 +0,0 @@ -name: Dev Release - -on: - push: - branches: [dev] - workflow_dispatch: - -permissions: - contents: write - -jobs: - build: - runs-on: ubuntu-latest - strategy: - matrix: - include: - - goos: linux - goarch: amd64 - - goos: linux - goarch: arm64 - - goos: darwin - goarch: amd64 - - goos: darwin - goarch: arm64 - - goos: windows - goarch: amd64 - - goos: windows - goarch: arm64 - - steps: - - uses: actions/checkout@v4 - - - name: Set up Go - uses: actions/setup-go@v5 - with: - go-version: '1.24' - check-latest: true - - - name: Build CLI - env: - GOOS: ${{ matrix.goos }} - GOARCH: ${{ matrix.goarch }} - CGO_ENABLED: '0' - run: | - EXT="" - if [ "$GOOS" = "windows" ]; then EXT=".exe"; fi - VERSION="dev-$(git rev-parse --short HEAD)" - go build -trimpath -ldflags="-s -w -X github.com/host-uk/core/pkg/cli.AppVersion=${VERSION}" -o core-${GOOS}-${GOARCH}${EXT} . 
- - - name: Upload artifact - uses: actions/upload-artifact@v4 - with: - name: core-${{ matrix.goos }}-${{ matrix.goarch }} - path: core-* - - release: - needs: build - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Download all artifacts - uses: actions/download-artifact@v4 - with: - path: artifacts - merge-multiple: true - - - name: List artifacts - run: ls -la artifacts/ - - - name: Delete existing dev release - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: gh release delete dev -y || true - - - name: Delete existing dev tag - run: git push origin :refs/tags/dev || true - - - name: Create dev release - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - COMMIT_SHA: ${{ github.sha }} - run: | - gh release create dev \ - --title "Development Build" \ - --notes "Latest development build from the dev branch. - - **Commit:** ${COMMIT_SHA} - **Built:** $(date -u +'%Y-%m-%d %H:%M:%S UTC') - - This is a pre-release for testing. Use tagged releases for production." \ - --prerelease \ - --target dev \ - artifacts/* diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml deleted file mode 100644 index 4ba585c..0000000 --- a/.github/workflows/release.yml +++ /dev/null @@ -1,86 +0,0 @@ -name: Release - -on: - push: - tags: - - 'v*.*.*' - -permissions: - contents: write - -jobs: - build: - runs-on: ubuntu-latest - strategy: - matrix: - include: - - goos: linux - goarch: amd64 - - goos: linux - goarch: arm64 - - goos: darwin - goarch: amd64 - - goos: darwin - goarch: arm64 - - goos: windows - goarch: amd64 - - goos: windows - goarch: arm64 - - steps: - - uses: actions/checkout@v4 - - - name: Set up Go - uses: actions/setup-go@v5 - with: - go-version: '1.24' - check-latest: true - - - name: Get version from tag - id: version - run: echo "VERSION=${GITHUB_REF_NAME}" >> $GITHUB_OUTPUT - - - name: Build CLI - env: - GOOS: ${{ matrix.goos }} - GOARCH: ${{ matrix.goarch }} - CGO_ENABLED: '0' - run: | - EXT="" - if [ "$GOOS" = "windows" 
]; then EXT=".exe"; fi - go build -trimpath \ - -ldflags="-s -w -X github.com/host-uk/core/pkg/cli.AppVersion=${{ steps.version.outputs.VERSION }}" \ - -o core-${GOOS}-${GOARCH}${EXT} . - - - name: Upload artifact - uses: actions/upload-artifact@v4 - with: - name: core-${{ matrix.goos }}-${{ matrix.goarch }} - path: core-* - - release: - needs: build - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Download all artifacts - uses: actions/download-artifact@v4 - with: - path: artifacts - merge-multiple: true - - - name: Generate checksums - run: | - cd artifacts - sha256sum core-* > checksums.txt - cat checksums.txt - - - name: Create release - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - gh release create ${{ github.ref_name }} \ - --title "${{ github.ref_name }}" \ - --generate-notes \ - artifacts/* diff --git a/.gitignore b/.gitignore index f36a48f..aed5470 100644 --- a/.gitignore +++ b/.gitignore @@ -13,7 +13,18 @@ coverage.html *.cache /coverage.txt bin/ +dist/ tasks /core +/i18n-validate +/validate +cmd/* +!cmd/gocmd/ +.angular/ +patch_cov.* +go.work.sum +lt-hn-index.html +.core/workspace/ +.idea/ diff --git a/.mcp.json b/.mcp.json new file mode 100644 index 0000000..fe40be8 --- /dev/null +++ b/.mcp.json @@ -0,0 +1,9 @@ +{ + "mcpServers": { + "core": { + "type": "stdio", + "command": "core-agent", + "args": ["mcp"] + } + } +} diff --git a/CLAUDE.md b/CLAUDE.md index a9b5d2b..22c84b1 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,102 +1,96 @@ # CLAUDE.md -This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. +Guidance for Claude Code and Codex when working with this repository. -## Project Overview +## Module -Core is a Web3 Framework written in Go using Wails v3 to replace Electron for desktop applications. It provides a dependency injection framework for managing services with lifecycle support. 
+`dappco.re/go/core` — dependency injection, service lifecycle, command routing, and message-passing for Go. -## Build & Development Commands +Source files live at the module root (not `pkg/core/`). Tests live in `tests/`. -This project uses [Task](https://taskfile.dev/) for automation. Key commands: +## Build & Test ```bash -# Run all tests -task test - -# Generate test coverage -task cov -task cov-view # Opens coverage HTML report - -# GUI application (Wails) -task gui:dev # Development mode with hot-reload -task gui:build # Production build - -# CLI application -task cli:build # Build CLI -task cli:run # Build and run CLI - -# Code review -task review # Submit for CodeRabbit review -task check # Run mod tidy + tests + review +go test ./tests/... # run all tests +go build . # verify compilation +GOWORK=off go test ./tests/ # test without workspace ``` -Run a single test: `go test -run TestName ./...` +Or via the Core CLI: -## Architecture +```bash +core go test +core go qa # fmt + vet + lint + test +``` -### Core Framework (`core.go`, `interfaces.go`) +## API Shape -The `Core` struct is the central application container managing: -- **Services**: Named service registry with type-safe retrieval via `ServiceFor[T]()` and `MustServiceFor[T]()` -- **Actions/IPC**: Message-passing system where services communicate via `ACTION(msg Message)` and register handlers via `RegisterAction()` -- **Lifecycle**: Services implementing `Startable` (OnStartup) and/or `Stoppable` (OnShutdown) interfaces are automatically called during app lifecycle +CoreGO uses the DTO/Options/Result pattern, not functional options: -Creating a Core instance: ```go -core, err := core.New( - core.WithService(myServiceFactory), - core.WithAssets(assets), - core.WithServiceLock(), // Prevents late service registration -) +c := core.New(core.Options{ + {Key: "name", Value: "myapp"}, +}) + +c.Service("cache", core.Service{ + OnStart: func() core.Result { return core.Result{OK: true} }, + OnStop: func() 
core.Result { return core.Result{OK: true} }, +}) + +c.Command("deploy/to/homelab", core.Command{ + Action: func(opts core.Options) core.Result { + return core.Result{Value: "deployed", OK: true} + }, +}) + +r := c.Cli().Run("deploy", "to", "homelab") ``` -### Service Registration Pattern +**Do not use:** `WithService`, `WithName`, `WithApp`, `WithServiceLock`, `Must*`, `ServiceFor[T]` — these no longer exist. -Services are registered via factory functions that receive the Core instance: -```go -func NewMyService(c *core.Core) (any, error) { - return &MyService{runtime: core.NewServiceRuntime(c, opts)}, nil -} +## Subsystems -core.New(core.WithService(NewMyService)) -``` +| Accessor | Returns | Purpose | +|----------|---------|---------| +| `c.Options()` | `*Options` | Input configuration | +| `c.App()` | `*App` | Application identity | +| `c.Data()` | `*Data` | Embedded filesystem mounts | +| `c.Drive()` | `*Drive` | Named transport handles | +| `c.Fs()` | `*Fs` | Local filesystem I/O | +| `c.Config()` | `*Config` | Runtime settings | +| `c.Cli()` | `*Cli` | CLI surface | +| `c.Command("path")` | `Result` | Command tree | +| `c.Service("name")` | `Result` | Service registry | +| `c.Lock("name")` | `*Lock` | Named mutexes | +| `c.IPC()` | `*Ipc` | Message bus | +| `c.I18n()` | `*I18n` | Locale + translation | -- `WithService`: Auto-discovers service name from package path, registers IPC handler if service has `HandleIPCEvents` method -- `WithName`: Explicitly names a service +## Messaging -### Runtime (`runtime_pkg.go`) +| Method | Pattern | +|--------|---------| +| `c.ACTION(msg)` | Broadcast to all handlers | +| `c.QUERY(q)` | First responder wins | +| `c.QUERYALL(q)` | Collect all responses | +| `c.PERFORM(task)` | First executor wins | +| `c.PerformAsync(task)` | Background goroutine | -`Runtime` is the Wails service wrapper that bootstraps the Core and its services. Use `NewWithFactories()` for custom service registration or `NewRuntime()` for basic setup. 
+## Error Handling -### ServiceRuntime Generic Helper (`runtime.go`) +Use `core.E()` for structured errors: -Embed `ServiceRuntime[T]` in services to get access to Core and typed options: -```go -type MyService struct { - *core.ServiceRuntime[MyServiceOptions] -} -``` - -### Error Handling (`e.go`) - -Use the `E()` helper for contextual errors: ```go return core.E("service.Method", "what failed", underlyingErr) ``` -### Test Naming Convention +## Test Naming -Tests use `_Good`, `_Bad`, `_Ugly` suffix pattern: -- `_Good`: Happy path tests -- `_Bad`: Expected error conditions -- `_Ugly`: Panic/edge cases +`_Good` (happy path), `_Bad` (expected errors), `_Ugly` (panics/edge cases). + +## Docs + +Full documentation in `docs/`. Start with `docs/getting-started.md`. ## Go Workspace -Uses Go 1.25 workspaces. The workspace includes: -- Root module (Core framework) -- `cmd/core-gui` (Wails GUI application) -- `cmd/examples/*` (Example applications) - -After adding modules: `go work sync` \ No newline at end of file +Part of `~/Code/go.work`. Use `GOWORK=off` to test in isolation. diff --git a/GEMINI.md b/GEMINI.md deleted file mode 100644 index 30a96e5..0000000 --- a/GEMINI.md +++ /dev/null @@ -1,55 +0,0 @@ -# GEMINI.md - -This file provides guidance for agentic interactions within this repository, specifically for Gemini and other MCP-compliant agents. - -## Agentic Context & MCP - -This project is built with an **Agentic** design philosophy. It is not exclusive to any single LLM provider (like Claude). - -- **MCP Support**: The system is designed to leverage the Model Context Protocol (MCP) to provide rich context and tools to agents. -- **Developer Image**: You are running within a standardized developer image (`host-uk/core` dev environment), ensuring consistent tooling and configuration. - -## Core CLI (Agent Interface) - -The `core` command is the primary interface for agents to manage the project. 
Agents should **always** prefer `core` commands over raw shell commands (like `go test`, `php artisan`, etc.). - -### Key Commands for Agents - -| Task | Command | Notes | -|------|---------|-------| -| **Health Check** | `core doctor` | Verify tools and environment | -| **Repo Status** | `core dev health` | Quick summary of all repos | -| **Work Status** | `core dev work --status` | Detailed dirty/ahead status | -| **Run Tests** | `core go test` | Run Go tests with correct flags | -| **Coverage** | `core go cov` | Generate coverage report | -| **Build** | `core build` | Build the project safely | -| **Search Code** | `core pkg search` | Find packages/repos | - -## Project Architecture - -Core is a Web3 Framework written in Go using Wails v3. - -### Core Framework - -- **Services**: Managed via dependency injection (`ServiceFor[T]()`). -- **Lifecycle**: `OnStartup` and `OnShutdown` hooks. -- **IPC**: Message-passing system for service communication. - -### Development Workflow - -1. **Check State**: `core dev work --status` -2. **Make Changes**: Modify code, add tests. -3. **Verify**: `core go test` (or `core php test` for PHP components). -4. **Commit**: `core dev commit` (or standard git if automated). -5. **Push**: `core dev push` (handles multiple repos). - -## Testing Standards - -- **Suffix Pattern**: - - `_Good`: Happy path - - `_Bad`: Expected errors - - `_Ugly`: Edge cases/panics - -## Go Workspace - -The project uses Go workspaces (`go.work`). Always run `core go work sync` after modifying modules. diff --git a/Makefile b/Makefile deleted file mode 100644 index 7dcdd42..0000000 --- a/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -.PHONY: all dev prod-docs development-docs - -all: - (cd cmd/core-gui && task build) - -.ONESHELL: -dev: - (cd cmd/core-gui && task dev) - -pre-commit: - coderabbit review --prompt-only - -development-docs: - @echo "Running development documentation Website..." 
- @(cd pkg/core/docs && mkdocs serve -w src) - -prod-docs: - @echo "Generating documentation tp Repo Root..." - @(cd pkg/core/docs && mkdocs build -d public && cp -r src public) - @echo "Documentation generated at docs/index.html" \ No newline at end of file diff --git a/README.md b/README.md index 6b1374d..eb2c12b 100644 --- a/README.md +++ b/README.md @@ -1,348 +1,151 @@ -# Core +# CoreGO -Core is a Web3 Framework, written in Go using Wails.io to replace Electron and the bloat of browsers that, at their core, still live in their mum's basement. +Dependency injection, service lifecycle, command routing, and message-passing for Go. -- Discord: http://discord.dappco.re -- Repo: https://github.com/Snider/Core - -## Vision - -Core is an **opinionated Web3 desktop application framework** providing: - -1. **Service-Oriented Architecture** - Pluggable services with dependency injection -2. **Encrypted Workspaces** - Each workspace gets its own PGP keypair, files are obfuscated -3. **Cross-Platform Storage** - Abstract storage backends (local, SFTP, WebDAV) behind a `Medium` interface -4. **Multi-Brand Support** - Same codebase powers different "hub" apps (AdminHub, ServerHub, GatewayHub, DeveloperHub, ClientHub) -5. **Built-in Crypto** - PGP encryption/signing, hashing, checksums as first-class citizens - -**Mental model:** A secure, encrypted workspace manager where each "workspace" is a cryptographically isolated environment. The framework handles windows, menus, trays, config, and i18n. - -## Quick Start +Import path: ```go -import core "github.com/Snider/Core" +import "dappco.re/go/core" +``` -app := core.New( - core.WithServiceLock(), +CoreGO is the foundation layer for the Core ecosystem. 
It gives you: + +- one container: `Core` +- one input shape: `Options` +- one output shape: `Result` +- one command tree: `Command` +- one message bus: `ACTION`, `QUERY`, `PERFORM` + +## Why It Exists + +Most non-trivial Go systems end up needing the same small set of infrastructure: + +- a place to keep runtime state and shared subsystems +- a predictable way to start and stop managed components +- a clean command surface for CLI-style workflows +- decoupled communication between components without tight imports + +CoreGO keeps those pieces small and explicit. + +## Quick Example + +```go +package main + +import ( + "context" + "fmt" + + "dappco.re/go/core" ) + +type flushCacheTask struct { + Name string +} + +func main() { + c := core.New(core.Options{ + {Key: "name", Value: "agent-workbench"}, + }) + + c.Service("cache", core.Service{ + OnStart: func() core.Result { + core.Info("cache started", "app", c.App().Name) + return core.Result{OK: true} + }, + OnStop: func() core.Result { + core.Info("cache stopped", "app", c.App().Name) + return core.Result{OK: true} + }, + }) + + c.RegisterTask(func(_ *core.Core, task core.Task) core.Result { + switch t := task.(type) { + case flushCacheTask: + return core.Result{Value: "cache flushed for " + t.Name, OK: true} + } + return core.Result{} + }) + + c.Command("cache/flush", core.Command{ + Action: func(opts core.Options) core.Result { + return c.PERFORM(flushCacheTask{ + Name: opts.String("name"), + }) + }, + }) + + if !c.ServiceStartup(context.Background(), nil).OK { + panic("startup failed") + } + + r := c.Cli().Run("cache", "flush", "--name=session-store") + fmt.Println(r.Value) + + _ = c.ServiceShutdown(context.Background()) +} ``` -## Prerequisites +## Core Surfaces -- [Go](https://go.dev/) 1.25+ -- [Node.js](https://nodejs.org/) -- [Wails](https://wails.io/) v3 -- [Task](https://taskfile.dev/) +| Surface | Purpose | +|---------|---------| +| `Core` | Central container and access point | +| `Service` | Managed 
lifecycle component | +| `Command` | Path-based executable operation | +| `Cli` | CLI surface over the command tree | +| `Data` | Embedded filesystem mounts | +| `Drive` | Named transport handles | +| `Fs` | Local filesystem operations | +| `Config` | Runtime settings and feature flags | +| `I18n` | Locale collection and translation delegation | +| `E`, `Wrap`, `ErrorLog`, `ErrorPanic` | Structured failures and panic recovery | -## Development Workflow (TDD) +## AX-Friendly Model + +CoreGO follows the same design direction as the AX spec: + +- predictable names over compressed names +- paths as documentation, such as `deploy/to/homelab` +- one repeated vocabulary across the framework +- examples that show how to call real APIs + +## Install ```bash -task test-gen # 1. Generate test stubs -task test # 2. Run tests (watch them fail) -# 3. Implement your feature -task test # 4. Run tests (watch them pass) -task review # 5. CodeRabbit review +go get dappco.re/go/core ``` -## Building & Running +Requires Go 1.26 or later. + +## Test ```bash -# GUI (Wails) -task gui:dev # Development with hot-reload -task gui:build # Production build - -# CLI -task cli:build # Build to cmd/core/bin/core -task cli:run # Build and run +core go test ``` -## All Tasks - -| Task | Description | -|------|-------------| -| `task test` | Run all Go tests | -| `task test-gen` | Generate test stubs for public API | -| `task check` | go mod tidy + tests + review | -| `task review` | CodeRabbit review | -| `task cov` | Generate coverage.txt | -| `task cov-view` | Open HTML coverage report | -| `task sync` | Update public API Go files | - ---- - -## Architecture - -### Project Structure - -``` -. 
-├── core.go # Facade re-exporting pkg/core -├── pkg/ -│ ├── core/ # Service container, DI, Runtime[T] -│ ├── config/ # JSON persistence, XDG paths -│ ├── display/ # Windows, tray, menus (Wails) -│ ├── crypt/ # Hashing, checksums, PGP -│ │ └── openpgp/ # Full PGP implementation -│ ├── io/ # Medium interface + backends -│ ├── workspace/ # Encrypted workspace management -│ ├── help/ # In-app documentation -│ └── i18n/ # Internationalization -├── cmd/ -│ ├── core/ # CLI application -│ └── core-gui/ # Wails GUI application -└── go.work # Links root, cmd/core, cmd/core-gui -``` - -### Service Pattern (Dual-Constructor DI) - -Every service follows this pattern: - -```go -// Static DI - standalone use/testing (no core.Runtime) -func New() (*Service, error) - -// Dynamic DI - for core.WithService() registration -func Register(c *core.Core) (any, error) -``` - -Services embed `*core.Runtime[Options]` for access to `Core()` and `Config()`. - -### IPC/Action System - -Services implement `HandleIPCEvents(c *core.Core, msg core.Message) error` - auto-discovered via reflection. Handles typed actions like `core.ActionServiceStartup`. - ---- - -## Wails v3 Frontend Bindings - -Core uses [Wails v3](https://v3alpha.wails.io/) to expose Go methods to a WebView2 browser runtime. Wails automatically generates TypeScript bindings for registered services. - -**Documentation:** [Wails v3 Method Bindings](https://v3alpha.wails.io/features/bindings/methods/) - -### How It Works - -1. **Go services** with exported methods are registered with Wails -2. Run `wails3 generate bindings` (or `wails3 dev` / `wails3 build`) -3. **TypeScript SDK** is generated in `frontend/bindings/` -4. Frontend calls Go methods with full type safety, no HTTP overhead - -### Current Binding Architecture - -```go -// cmd/core-gui/main.go -app.RegisterService(application.NewService(coreService)) // Only Core is registered -``` - -**Problem:** Only `Core` is registered with Wails. 
Sub-services (crypt, workspace, display, etc.) are internal to Core's service map - their methods aren't directly exposed to JS. - -**Currently exposed** (see `cmd/core-gui/public/bindings/`): -```typescript -// From frontend: -import { ACTION, Config, Service } from './bindings/github.com/Snider/Core/pkg/core' - -ACTION(msg) // Broadcast IPC message -Config() // Get config service reference -Service("workspace") // Get service by name (returns any) -``` - -**NOT exposed:** Direct calls like `workspace.CreateWorkspace()` or `crypt.Hash()`. - -### The IPC Bridge Pattern (Chosen Architecture) - -Sub-services are accessed via Core's **IPC/ACTION system**, not direct Wails bindings: - -```typescript -// Frontend calls Core.ACTION() with typed messages -import { ACTION } from './bindings/github.com/Snider/Core/pkg/core' - -// Open a window -ACTION({ action: "display.open_window", name: "settings", options: { Title: "Settings", Width: 800 } }) - -// Switch workspace -ACTION({ action: "workspace.switch_workspace", name: "myworkspace" }) -``` - -Each service implements `HandleIPCEvents(c *core.Core, msg core.Message)` to process these messages: - -```go -// pkg/display/display.go -func (s *Service) HandleIPCEvents(c *core.Core, msg core.Message) error { - switch m := msg.(type) { - case map[string]any: - if action, ok := m["action"].(string); ok && action == "display.open_window" { - return s.handleOpenWindowAction(m) - } - } - return nil -} -``` - -**Why this pattern:** -- Single Wails service (Core) = simpler binding generation -- Services remain decoupled from Wails -- Centralized message routing via `ACTION()` -- Services can communicate internally using same pattern - -**Current gap:** Not all service methods have IPC handlers yet. See `HandleIPCEvents` in each service to understand what's wired up. - -### Generating Bindings +Or with the standard toolchain: ```bash -cd cmd/core-gui -wails3 generate bindings # Regenerate after Go changes +go test ./... 
``` -Bindings output to `cmd/core-gui/public/bindings/github.com/Snider/Core/` mirroring Go package structure. +## Docs ---- +The full documentation set lives in `docs/`. -### Service Interfaces (`pkg/core/interfaces.go`) +| Path | Covers | +|------|--------| +| `docs/getting-started.md` | First runnable CoreGO app | +| `docs/primitives.md` | `Options`, `Result`, `Service`, `Message`, `Query`, `Task` | +| `docs/services.md` | Service registry, runtime helpers, service locks | +| `docs/commands.md` | Path-based commands and CLI execution | +| `docs/messaging.md` | `ACTION`, `QUERY`, `QUERYALL`, `PERFORM`, `PerformAsync` | +| `docs/lifecycle.md` | Startup, shutdown, context, and task draining | +| `docs/subsystems.md` | `App`, `Data`, `Drive`, `Fs`, `I18n`, `Cli` | +| `docs/errors.md` | Structured errors, logging helpers, panic recovery | +| `docs/testing.md` | Test naming and framework testing patterns | -```go -type Config interface { - Get(key string, out any) error - Set(key string, v any) error -} +## License -type Display interface { - OpenWindow(opts ...WindowOption) error -} - -type Workspace interface { - CreateWorkspace(identifier, password string) (string, error) - SwitchWorkspace(name string) error - WorkspaceFileGet(filename string) (string, error) - WorkspaceFileSet(filename, content string) error -} - -type Crypt interface { - EncryptPGP(writer io.Writer, recipientPath, data string, ...) (string, error) - DecryptPGP(recipientPath, message, passphrase string, ...) 
(string, error) -} -``` - ---- - -## Current State (Prototype) - -### Working - -| Package | Notes | -|---------|-------| -| `pkg/core` | Service container, DI, thread-safe - solid | -| `pkg/config` | JSON persistence, XDG paths - solid | -| `pkg/crypt` | Hashing, checksums, PGP - solid, well-tested | -| `pkg/help` | Embedded docs, Show/ShowAt - solid | -| `pkg/i18n` | Multi-language with go-i18n - solid | -| `pkg/io` | Medium interface + local backend - solid | -| `pkg/workspace` | Workspace creation, switching, file ops - functional | - -### Partial - -| Package | Issues | -|---------|--------| -| `pkg/display` | Window creation works; menu/tray handlers are TODOs | - ---- - -## Priority Work Items - -### 1. IMPLEMENT: System Tray Brand Support - -`pkg/display/tray.go:52-63` - Commented brand-specific menu items need implementation. - -### 2. ADD: Integration Tests - -| Package | Notes | -|---------|-------| -| `pkg/display` | Integration tests requiring Wails runtime (27% unit coverage) | - ---- - -## Package Deep Dives - -### pkg/workspace - The Core Feature - -Each workspace is: -1. Identified by LTHN hash of user identifier -2. Has directory structure: `config/`, `log/`, `data/`, `files/`, `keys/` -3. Gets a PGP keypair generated on creation -4. Files accessed via obfuscated paths - -The `workspaceList` maps workspace IDs to public keys. 
- -### pkg/crypt/openpgp - -Full PGP using `github.com/ProtonMail/go-crypto`: -- `CreateKeyPair(name, passphrase)` - RSA-4096 with revocation cert -- `EncryptPGP()` - Encrypt + optional signing -- `DecryptPGP()` - Decrypt + optional signature verification - -### pkg/io - Storage Abstraction - -```go -type Medium interface { - Read(path string) (string, error) - Write(path, content string) error - EnsureDir(path string) error - IsFile(path string) bool - FileGet(path string) (string, error) - FileSet(path, content string) error -} -``` - -Implementations: `local/`, `sftp/`, `webdav/` - ---- - -## Future Work - -### Phase 1: Core Stability -- [x] ~~Fix workspace medium injection (critical blocker)~~ -- [x] ~~Initialize `io.Local` global~~ -- [x] ~~Clean up dead code (orphaned vars, broken wrappers)~~ -- [x] ~~Wire up IPC handlers for all services (config, crypt, display, help, i18n, workspace)~~ -- [x] ~~Complete display menu handlers (New/List workspace)~~ -- [x] ~~Tray icon setup with asset embedding~~ -- [x] ~~Test coverage for io packages~~ -- [ ] System tray brand-specific menus - -### Phase 2: Multi-Brand Support -- [ ] Define brand configuration system (config? build flags?) -- [ ] Implement brand-specific tray menus (AdminHub, ServerHub, GatewayHub, DeveloperHub, ClientHub) -- [ ] Brand-specific theming/assets -- [ ] Per-brand default workspace configurations - -### Phase 3: Remote Storage -- [ ] Complete SFTP backend (`pkg/io/sftp/`) -- [ ] Complete WebDAV backend (`pkg/io/webdav/`) -- [ ] Workspace sync across storage backends -- [ ] Conflict resolution for multi-device access - -### Phase 4: Enhanced Crypto -- [ ] Key management UI (import/export, key rotation) -- [ ] Multi-recipient encryption -- [ ] Hardware key support (YubiKey, etc.) 
-- [ ] Encrypted workspace backup/restore - -### Phase 5: Developer Experience -- [ ] TypeScript types for IPC messages (codegen from Go structs) -- [ ] Hot-reload for service registration -- [ ] Plugin system for third-party services -- [ ] CLI tooling for workspace management - -### Phase 6: Distribution -- [ ] Auto-update mechanism -- [ ] Platform installers (DMG, MSI, AppImage) -- [ ] Signing and notarization -- [ ] Crash reporting integration - ---- - -## For New Contributors - -1. Run `task test` to verify all tests pass -2. Follow TDD: `task test-gen` creates stubs, implement to pass -3. The dual-constructor pattern is intentional: `New(deps)` for tests, `Register()` for runtime -4. See `cmd/core-gui/main.go` for how services wire together -5. IPC handlers in each service's `HandleIPCEvents()` are the frontend bridge +EUPL-1.2 diff --git a/Taskfile.yml b/Taskfile.yml deleted file mode 100644 index 12b4872..0000000 --- a/Taskfile.yml +++ /dev/null @@ -1,132 +0,0 @@ -version: '3' - -tasks: - # --- CLI Management --- - cli:build: - desc: "Build core CLI to ./bin/core" - cmds: - - go build -o ./bin/core . - - cli:install: - desc: "Install core CLI to system PATH" - cmds: - - go install . 
- - # --- Development --- - test: - desc: "Run all tests" - cmds: - - core test - - test:verbose: - desc: "Run all tests with verbose output" - cmds: - - core test --verbose - - test:run: - desc: "Run specific test (use: task test:run -- TestName)" - cmds: - - core test --run {{.CLI_ARGS}} - - cov: - desc: "Run tests with coverage report" - cmds: - - core go cov - - fmt: - desc: "Format Go code" - cmds: - - core go fmt - - lint: - desc: "Run linter" - cmds: - - core go lint - - mod:tidy: - desc: "Run go mod tidy" - cmds: - - core go mod tidy - - # --- Quality Assurance --- - qa: - desc: "Run QA: fmt, vet, lint, test" - cmds: - - core go qa - - qa:quick: - desc: "Quick QA: fmt, vet, lint only" - cmds: - - core go qa quick - - qa:full: - desc: "Full QA: + race, vuln, security" - cmds: - - core go qa full - - qa:fix: - desc: "QA with auto-fix" - cmds: - - core go qa --fix - - # --- Build --- - build: - desc: "Build project with auto-detection" - cmds: - - core build - - build:ci: - desc: "Build for CI (all targets, checksums)" - cmds: - - core build --ci - - # --- Environment --- - doctor: - desc: "Check development environment" - cmds: - - core doctor - - doctor:verbose: - desc: "Check environment with details" - cmds: - - core doctor --verbose - - # --- Code Review --- - review: - desc: "Run CodeRabbit review" - cmds: - - coderabbit review --prompt-only - - check: - desc: "Tidy, test, and review" - cmds: - - task: mod:tidy - - task: test - - task: review - - # --- i18n --- - i18n:generate: - desc: "Regenerate i18n key constants" - cmds: - - go generate ./pkg/i18n/... - - i18n:validate: - desc: "Validate i18n key usage" - cmds: - - go run ./internal/tools/i18n-validate ./... 
- - # --- Multi-repo (when in workspace) --- - dev:health: - desc: "Check health of all repos" - cmds: - - core dev health - - dev:work: - desc: "Full workflow: status, commit, push" - cmds: - - core dev work - - dev:status: - desc: "Show status of all repos" - cmds: - - core dev work --status diff --git a/app.go b/app.go new file mode 100644 index 0000000..3a5aa02 --- /dev/null +++ b/app.go @@ -0,0 +1,53 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// Application identity for the Core framework. +// Based on leaanthony/sail — Name, Filename, Path. + +package core + +import ( + "os/exec" + "path/filepath" +) + +// App holds the application identity and optional GUI runtime. +type App struct { + // Name is the human-readable application name (e.g., "Core CLI"). + Name string + + // Version is the application version string (e.g., "1.2.3"). + Version string + + // Description is a short description of the application. + Description string + + // Filename is the executable filename (e.g., "core"). + Filename string + + // Path is the absolute path to the executable. + Path string + + // Runtime is the GUI runtime (e.g., Wails App). + // Nil for CLI-only applications. + Runtime any +} + +// Find locates a program on PATH and returns a Result containing the App. +// +// r := core.Find("node", "Node.js") +// if r.OK { app := r.Value.(*App) } +func Find(filename, name string) Result { + path, err := exec.LookPath(filename) + if err != nil { + return Result{err, false} + } + abs, err := filepath.Abs(path) + if err != nil { + return Result{err, false} + } + return Result{&App{ + Name: name, + Filename: filename, + Path: abs, + }, true} +} diff --git a/app_test.go b/app_test.go new file mode 100644 index 0000000..406cdb4 --- /dev/null +++ b/app_test.go @@ -0,0 +1,39 @@ +package core_test + +import ( + "testing" + + . 
"dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- App --- + +func TestApp_Good(t *testing.T) { + c := New(WithOptions(Options{{Key: "name", Value: "myapp"}})).Value.(*Core) + assert.Equal(t, "myapp", c.App().Name) +} + +func TestApp_Empty_Good(t *testing.T) { + c := New().Value.(*Core) + assert.NotNil(t, c.App()) + assert.Equal(t, "", c.App().Name) +} + +func TestApp_Runtime_Good(t *testing.T) { + c := New().Value.(*Core) + c.App().Runtime = &struct{ Name string }{Name: "wails"} + assert.NotNil(t, c.App().Runtime) +} + +func TestApp_Find_Good(t *testing.T) { + r := Find("go", "go") + assert.True(t, r.OK) + app := r.Value.(*App) + assert.NotEmpty(t, app.Path) +} + +func TestApp_Find_Bad(t *testing.T) { + r := Find("nonexistent-binary-xyz", "test") + assert.False(t, r.OK) +} diff --git a/array.go b/array.go new file mode 100644 index 0000000..ff085bb --- /dev/null +++ b/array.go @@ -0,0 +1,101 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// Generic slice operations for the Core framework. +// Based on leaanthony/slicer, rewritten with Go 1.18+ generics. + +package core + +// Array is a typed slice with common operations. +type Array[T comparable] struct { + items []T +} + +// NewArray creates an empty Array. +func NewArray[T comparable](items ...T) *Array[T] { + return &Array[T]{items: items} +} + +// Add appends values. +func (s *Array[T]) Add(values ...T) { + s.items = append(s.items, values...) +} + +// AddUnique appends values only if not already present. +func (s *Array[T]) AddUnique(values ...T) { + for _, v := range values { + if !s.Contains(v) { + s.items = append(s.items, v) + } + } +} + +// Contains returns true if the value is in the slice. +func (s *Array[T]) Contains(val T) bool { + for _, v := range s.items { + if v == val { + return true + } + } + return false +} + +// Filter returns a new Array with elements matching the predicate. 
+func (s *Array[T]) Filter(fn func(T) bool) Result { + filtered := &Array[T]{} + for _, v := range s.items { + if fn(v) { + filtered.items = append(filtered.items, v) + } + } + return Result{filtered, true} +} + +// Each runs a function on every element. +func (s *Array[T]) Each(fn func(T)) { + for _, v := range s.items { + fn(v) + } +} + +// Remove removes the first occurrence of a value. +func (s *Array[T]) Remove(val T) { + for i, v := range s.items { + if v == val { + s.items = append(s.items[:i], s.items[i+1:]...) + return + } + } +} + +// Deduplicate removes duplicate values, preserving order. +func (s *Array[T]) Deduplicate() { + seen := make(map[T]struct{}) + result := make([]T, 0, len(s.items)) + for _, v := range s.items { + if _, exists := seen[v]; !exists { + seen[v] = struct{}{} + result = append(result, v) + } + } + s.items = result +} + +// Len returns the number of elements. +func (s *Array[T]) Len() int { + return len(s.items) +} + +// Clear removes all elements. +func (s *Array[T]) Clear() { + s.items = nil +} + +// AsSlice returns a copy of the underlying slice. +func (s *Array[T]) AsSlice() []T { + if s.items == nil { + return nil + } + out := make([]T, len(s.items)) + copy(out, s.items) + return out +} diff --git a/array_test.go b/array_test.go new file mode 100644 index 0000000..8212d67 --- /dev/null +++ b/array_test.go @@ -0,0 +1,90 @@ +package core_test + +import ( + "testing" + + . 
"dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- Array[T] --- + +func TestArray_New_Good(t *testing.T) { + a := NewArray("a", "b", "c") + assert.Equal(t, 3, a.Len()) +} + +func TestArray_Add_Good(t *testing.T) { + a := NewArray[string]() + a.Add("x", "y") + assert.Equal(t, 2, a.Len()) + assert.True(t, a.Contains("x")) + assert.True(t, a.Contains("y")) +} + +func TestArray_AddUnique_Good(t *testing.T) { + a := NewArray("a", "b") + a.AddUnique("b", "c") + assert.Equal(t, 3, a.Len()) +} + +func TestArray_Contains_Good(t *testing.T) { + a := NewArray(1, 2, 3) + assert.True(t, a.Contains(2)) + assert.False(t, a.Contains(99)) +} + +func TestArray_Filter_Good(t *testing.T) { + a := NewArray(1, 2, 3, 4, 5) + r := a.Filter(func(n int) bool { return n%2 == 0 }) + assert.True(t, r.OK) + evens := r.Value.(*Array[int]) + assert.Equal(t, 2, evens.Len()) + assert.True(t, evens.Contains(2)) + assert.True(t, evens.Contains(4)) +} + +func TestArray_Each_Good(t *testing.T) { + a := NewArray("a", "b", "c") + var collected []string + a.Each(func(s string) { collected = append(collected, s) }) + assert.Equal(t, []string{"a", "b", "c"}, collected) +} + +func TestArray_Remove_Good(t *testing.T) { + a := NewArray("a", "b", "c") + a.Remove("b") + assert.Equal(t, 2, a.Len()) + assert.False(t, a.Contains("b")) +} + +func TestArray_Remove_Bad(t *testing.T) { + a := NewArray("a", "b") + a.Remove("missing") + assert.Equal(t, 2, a.Len()) +} + +func TestArray_Deduplicate_Good(t *testing.T) { + a := NewArray("a", "b", "a", "c", "b") + a.Deduplicate() + assert.Equal(t, 3, a.Len()) +} + +func TestArray_Clear_Good(t *testing.T) { + a := NewArray(1, 2, 3) + a.Clear() + assert.Equal(t, 0, a.Len()) +} + +func TestArray_AsSlice_Good(t *testing.T) { + a := NewArray("x", "y") + s := a.AsSlice() + assert.Equal(t, []string{"x", "y"}, s) +} + +func TestArray_Empty_Good(t *testing.T) { + a := NewArray[int]() + assert.Equal(t, 0, a.Len()) + assert.False(t, a.Contains(0)) + assert.Equal(t, 
[]int(nil), a.AsSlice()) +} diff --git a/cli.go b/cli.go new file mode 100644 index 0000000..ff7d298 --- /dev/null +++ b/cli.go @@ -0,0 +1,169 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// Cli is the CLI surface layer for the Core command tree. +// It reads commands from Core's registry and wires them to terminal I/O. +// +// Run the CLI: +// +// c := core.New(core.Options{{Key: "name", Value: "myapp"}}) +// c.Command("deploy", handler) +// c.Cli().Run() +// +// The Cli resolves os.Args to a command path, parses flags, +// and calls the command's action with parsed options. +package core + +import ( + "io" + "os" +) + +// Cli is the CLI surface for the Core command tree. +type Cli struct { + core *Core + output io.Writer + banner func(*Cli) string +} + +// Print writes to the CLI output (defaults to os.Stdout). +// +// c.Cli().Print("hello %s", "world") +func (cl *Cli) Print(format string, args ...any) { + Print(cl.output, format, args...) +} + +// SetOutput sets the CLI output writer. +// +// c.Cli().SetOutput(os.Stderr) +func (cl *Cli) SetOutput(w io.Writer) { + cl.output = w +} + +// Run resolves os.Args to a command path and executes it. +// +// c.Cli().Run() +// c.Cli().Run("deploy", "to", "homelab") +func (cl *Cli) Run(args ...string) Result { + if len(args) == 0 { + args = os.Args[1:] + } + + clean := FilterArgs(args) + + if cl.core == nil || cl.core.commands == nil { + if cl.banner != nil { + cl.Print(cl.banner(cl)) + } + return Result{} + } + + cl.core.commands.mu.RLock() + cmdCount := len(cl.core.commands.commands) + cl.core.commands.mu.RUnlock() + + if cmdCount == 0 { + if cl.banner != nil { + cl.Print(cl.banner(cl)) + } + return Result{} + } + + // Resolve command path from args + var cmd *Command + var remaining []string + + cl.core.commands.mu.RLock() + for i := len(clean); i > 0; i-- { + path := JoinPath(clean[:i]...) 
+ if c, ok := cl.core.commands.commands[path]; ok { + cmd = c + remaining = clean[i:] + break + } + } + cl.core.commands.mu.RUnlock() + + if cmd == nil { + if cl.banner != nil { + cl.Print(cl.banner(cl)) + } + cl.PrintHelp() + return Result{} + } + + // Build options from remaining args + opts := Options{} + for _, arg := range remaining { + key, val, valid := ParseFlag(arg) + if valid { + if Contains(arg, "=") { + opts = append(opts, Option{Key: key, Value: val}) + } else { + opts = append(opts, Option{Key: key, Value: true}) + } + } else if !IsFlag(arg) { + opts = append(opts, Option{Key: "_arg", Value: arg}) + } + } + + if cmd.Action != nil { + return cmd.Run(opts) + } + if cmd.Lifecycle != nil { + return cmd.Start(opts) + } + return Result{E("core.Cli.Run", Concat("command \"", cmd.Path, "\" is not executable"), nil), false} +} + +// PrintHelp prints available commands. +// +// c.Cli().PrintHelp() +func (cl *Cli) PrintHelp() { + if cl.core == nil || cl.core.commands == nil { + return + } + + name := "" + if cl.core.app != nil { + name = cl.core.app.Name + } + if name != "" { + cl.Print("%s commands:", name) + } else { + cl.Print("Commands:") + } + + cl.core.commands.mu.RLock() + defer cl.core.commands.mu.RUnlock() + + for path, cmd := range cl.core.commands.commands { + if cmd.Hidden || (cmd.Action == nil && cmd.Lifecycle == nil) { + continue + } + tr := cl.core.I18n().Translate(cmd.I18nKey()) + desc, _ := tr.Value.(string) + if desc == "" || desc == cmd.I18nKey() { + cl.Print(" %s", path) + } else { + cl.Print(" %-30s %s", path, desc) + } + } +} + +// SetBanner sets the banner function. +// +// c.Cli().SetBanner(func(_ *core.Cli) string { return "My App v1.0" }) +func (cl *Cli) SetBanner(fn func(*Cli) string) { + cl.banner = fn +} + +// Banner returns the banner string. 
+func (cl *Cli) Banner() string { + if cl.banner != nil { + return cl.banner(cl) + } + if cl.core != nil && cl.core.app != nil && cl.core.app.Name != "" { + return cl.core.app.Name + } + return "" +} diff --git a/cli_test.go b/cli_test.go new file mode 100644 index 0000000..f29a467 --- /dev/null +++ b/cli_test.go @@ -0,0 +1,85 @@ +package core_test + +import ( + "bytes" + "testing" + + . "dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- Cli Surface --- + +func TestCli_Good(t *testing.T) { + c := New().Value.(*Core) + assert.NotNil(t, c.Cli()) +} + +func TestCli_Banner_Good(t *testing.T) { + c := New(WithOptions(Options{{Key: "name", Value: "myapp"}})).Value.(*Core) + assert.Equal(t, "myapp", c.Cli().Banner()) +} + +func TestCli_SetBanner_Good(t *testing.T) { + c := New().Value.(*Core) + c.Cli().SetBanner(func(_ *Cli) string { return "Custom Banner" }) + assert.Equal(t, "Custom Banner", c.Cli().Banner()) +} + +func TestCli_Run_Good(t *testing.T) { + c := New().Value.(*Core) + executed := false + c.Command("hello", Command{Action: func(_ Options) Result { + executed = true + return Result{Value: "world", OK: true} + }}) + r := c.Cli().Run("hello") + assert.True(t, r.OK) + assert.Equal(t, "world", r.Value) + assert.True(t, executed) +} + +func TestCli_Run_Nested_Good(t *testing.T) { + c := New().Value.(*Core) + executed := false + c.Command("deploy/to/homelab", Command{Action: func(_ Options) Result { + executed = true + return Result{OK: true} + }}) + r := c.Cli().Run("deploy", "to", "homelab") + assert.True(t, r.OK) + assert.True(t, executed) +} + +func TestCli_Run_WithFlags_Good(t *testing.T) { + c := New().Value.(*Core) + var received Options + c.Command("serve", Command{Action: func(opts Options) Result { + received = opts + return Result{OK: true} + }}) + c.Cli().Run("serve", "--port=8080", "--debug") + assert.Equal(t, "8080", received.String("port")) + assert.True(t, received.Bool("debug")) +} + +func TestCli_Run_NoCommand_Good(t 
*testing.T) { + c := New().Value.(*Core) + r := c.Cli().Run() + assert.False(t, r.OK) +} + +func TestCli_PrintHelp_Good(t *testing.T) { + c := New(WithOptions(Options{{Key: "name", Value: "myapp"}})).Value.(*Core) + c.Command("deploy", Command{Action: func(_ Options) Result { return Result{OK: true} }}) + c.Command("serve", Command{Action: func(_ Options) Result { return Result{OK: true} }}) + c.Cli().PrintHelp() +} + +func TestCli_SetOutput_Good(t *testing.T) { + c := New().Value.(*Core) + var buf bytes.Buffer + c.Cli().SetOutput(&buf) + c.Cli().Print("hello %s", "world") + assert.Contains(t, buf.String(), "hello world") +} diff --git a/command.go b/command.go new file mode 100644 index 0000000..7b74e9f --- /dev/null +++ b/command.go @@ -0,0 +1,208 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// Command is a DTO representing an executable operation. +// Commands don't know if they're root, child, or nested — the tree +// structure comes from composition via path-based registration. +// +// Register a command: +// +// c.Command("deploy", func(opts core.Options) core.Result { +// return core.Result{"deployed", true} +// }) +// +// Register a nested command: +// +// c.Command("deploy/to/homelab", handler) +// +// Description is an i18n key — derived from path if omitted: +// +// "deploy" → "cmd.deploy.description" +// "deploy/to/homelab" → "cmd.deploy.to.homelab.description" +package core + +import ( + "sync" +) + +// CommandAction is the function signature for command handlers. +// +// func(opts core.Options) core.Result +type CommandAction func(Options) Result + +// CommandLifecycle is implemented by commands that support managed lifecycle. +// Basic commands only need an action. Daemon commands implement Start/Stop/Signal +// via go-process. +type CommandLifecycle interface { + Start(Options) Result + Stop() Result + Restart() Result + Reload() Result + Signal(string) Result +} + +// Command is the DTO for an executable operation. 
type Command struct {
	// Name is the last segment of Path ("homelab" for "deploy/to/homelab");
	// set by Core.Command on registration.
	Name string
	// Description is an i18n key — derived from path if empty.
	Description string
	// Path is the full registration path, e.g. "deploy/to/homelab".
	Path string
	// Action holds the business logic for basic commands.
	Action CommandAction
	// Lifecycle is optional — provided by go-process for daemon commands.
	Lifecycle CommandLifecycle
	// Flags are the command's declared flags.
	Flags Options
	// Hidden excludes this command from CLI help output.
	Hidden bool
	// commands holds child commands keyed by their next path segment (internal).
	commands map[string]*Command
	// mu guards commands.
	mu sync.RWMutex
}

// I18nKey returns the i18n key for this command's description.
// An explicit Description wins; otherwise the key is derived from Path
// (falling back to Name when Path is empty):
//
//	cmd with path "deploy/to/homelab" → "cmd.deploy.to.homelab.description"
func (cmd *Command) I18nKey() string {
	if cmd.Description != "" {
		return cmd.Description
	}
	path := cmd.Path
	if path == "" {
		path = cmd.Name
	}
	return Concat("cmd.", Replace(path, "/", "."), ".description")
}

// Run executes the command's action with the given options.
// Commands without an Action (e.g. auto-created placeholder path nodes)
// yield a structured error Result instead of panicking.
//
//	result := cmd.Run(core.Options{{Key: "target", Value: "homelab"}})
func (cmd *Command) Run(opts Options) Result {
	if cmd.Action == nil {
		return Result{E("core.Command.Run", Concat("command \"", cmd.Path, "\" is not executable"), nil), false}
	}
	return cmd.Action(opts)
}

// Start delegates to the lifecycle implementation if available.
// Without a Lifecycle it falls back to running the plain Action.
func (cmd *Command) Start(opts Options) Result {
	if cmd.Lifecycle != nil {
		return cmd.Lifecycle.Start(opts)
	}
	return cmd.Run(opts)
}

// Stop delegates to the lifecycle implementation.
// Without a Lifecycle it returns a zero Result (OK == false).
func (cmd *Command) Stop() Result {
	if cmd.Lifecycle != nil {
		return cmd.Lifecycle.Stop()
	}
	return Result{}
}

// Restart delegates to the lifecycle implementation.
// Without a Lifecycle it returns a zero Result (OK == false).
func (cmd *Command) Restart() Result {
	if cmd.Lifecycle != nil {
		return cmd.Lifecycle.Restart()
	}
	return Result{}
}

// Reload delegates to the lifecycle implementation.
// Without a Lifecycle it returns a zero Result (OK == false).
func (cmd *Command) Reload() Result {
	if cmd.Lifecycle != nil {
		return cmd.Lifecycle.Reload()
	}
	return Result{}
}

// Signal delegates to the lifecycle implementation.
func (cmd *Command) Signal(sig string) Result {
	if cmd.Lifecycle != nil {
		return cmd.Lifecycle.Signal(sig)
	}
	// No lifecycle attached: zero Result (OK == false).
	return Result{}
}

// --- Command Registry (on Core) ---

// commandRegistry holds the command tree.
// commands maps full slash-separated paths (e.g. "deploy/to/homelab")
// to their *Command; mu guards the map.
type commandRegistry struct {
	commands map[string]*Command
	mu       sync.RWMutex
}

// Command gets or registers a command by path.
//
// With no Command argument it is a read-only lookup (RLock only);
// with one, it registers under the write lock. Registering a nested
// path auto-creates placeholder parent nodes, and a placeholder may
// later be overwritten by a real command — its existing subtree is
// preserved.
//
//	c.Command("deploy", Command{Action: handler})
//	r := c.Command("deploy")
func (c *Core) Command(path string, command ...Command) Result {
	if len(command) == 0 {
		c.commands.mu.RLock()
		cmd, ok := c.commands.commands[path]
		c.commands.mu.RUnlock()
		return Result{cmd, ok}
	}

	// Reject empty paths and malformed separators up front.
	if path == "" || HasPrefix(path, "/") || HasSuffix(path, "/") || Contains(path, "//") {
		return Result{E("core.Command", Concat("invalid command path: \"", path, "\""), nil), false}
	}

	c.commands.mu.Lock()
	defer c.commands.mu.Unlock()

	// Only an executable command counts as "already registered";
	// placeholder parents (no Action, no Lifecycle) may be replaced.
	if existing, exists := c.commands.commands[path]; exists && (existing.Action != nil || existing.Lifecycle != nil) {
		return Result{E("core.Command", Concat("command \"", path, "\" already registered"), nil), false}
	}

	cmd := &command[0]
	cmd.Name = pathName(path)
	cmd.Path = path
	if cmd.commands == nil {
		cmd.commands = make(map[string]*Command)
	}

	// Preserve existing subtree when overwriting a placeholder parent
	if existing, exists := c.commands.commands[path]; exists {
		for k, v := range existing.commands {
			if _, has := cmd.commands[k]; !has {
				cmd.commands[k] = v
			}
		}
	}

	c.commands.commands[path] = cmd

	// Build parent chain — "deploy/to/homelab" creates "deploy" and "deploy/to" if missing
	parts := Split(path, "/")
	for i := len(parts) - 1; i > 0; i-- {
		parentPath := JoinPath(parts[:i]...)
		if _, exists := c.commands.commands[parentPath]; !exists {
			c.commands.commands[parentPath] = &Command{
				Name:     parts[i-1],
				Path:     parentPath,
				commands: make(map[string]*Command),
			}
		}
		// Link this node into its parent, then walk up one level.
		c.commands.commands[parentPath].commands[parts[i]] = cmd
		cmd = c.commands.commands[parentPath]
	}

	return Result{OK: true}
}

// Commands returns all registered command paths, including auto-created
// parent placeholders. Order is unspecified (map iteration).
//
//	paths := c.Commands()
func (c *Core) Commands() []string {
	if c.commands == nil {
		return nil
	}
	c.commands.mu.RLock()
	defer c.commands.mu.RUnlock()
	var paths []string
	for k := range c.commands.commands {
		paths = append(paths, k)
	}
	return paths
}

// pathName extracts the last segment of a path.
// "deploy/to/homelab" → "homelab"
func pathName(path string) string {
	parts := Split(path, "/")
	return parts[len(parts)-1]
}
diff --git a/command_test.go b/command_test.go
new file mode 100644
index 0000000..3e0bb91
--- /dev/null
+++ b/command_test.go
@@ -0,0 +1,217 @@
+package core_test
+
+import (
+	"testing"
+
+	.
"dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- Command DTO --- + +func TestCommand_Register_Good(t *testing.T) { + c := New().Value.(*Core) + r := c.Command("deploy", Command{Action: func(_ Options) Result { + return Result{Value: "deployed", OK: true} + }}) + assert.True(t, r.OK) +} + +func TestCommand_Get_Good(t *testing.T) { + c := New().Value.(*Core) + c.Command("deploy", Command{Action: func(_ Options) Result { return Result{OK: true} }}) + r := c.Command("deploy") + assert.True(t, r.OK) + assert.NotNil(t, r.Value) +} + +func TestCommand_Get_Bad(t *testing.T) { + c := New().Value.(*Core) + r := c.Command("nonexistent") + assert.False(t, r.OK) +} + +func TestCommand_Run_Good(t *testing.T) { + c := New().Value.(*Core) + c.Command("greet", Command{Action: func(opts Options) Result { + return Result{Value: Concat("hello ", opts.String("name")), OK: true} + }}) + cmd := c.Command("greet").Value.(*Command) + r := cmd.Run(Options{{Key: "name", Value: "world"}}) + assert.True(t, r.OK) + assert.Equal(t, "hello world", r.Value) +} + +func TestCommand_Run_NoAction_Good(t *testing.T) { + c := New().Value.(*Core) + c.Command("empty", Command{Description: "no action"}) + cmd := c.Command("empty").Value.(*Command) + r := cmd.Run(Options{}) + assert.False(t, r.OK) +} + +// --- Nested Commands --- + +func TestCommand_Nested_Good(t *testing.T) { + c := New().Value.(*Core) + c.Command("deploy/to/homelab", Command{Action: func(_ Options) Result { + return Result{Value: "deployed to homelab", OK: true} + }}) + + r := c.Command("deploy/to/homelab") + assert.True(t, r.OK) + + // Parent auto-created + assert.True(t, c.Command("deploy").OK) + assert.True(t, c.Command("deploy/to").OK) +} + +func TestCommand_Paths_Good(t *testing.T) { + c := New().Value.(*Core) + c.Command("deploy", Command{Action: func(_ Options) Result { return Result{OK: true} }}) + c.Command("serve", Command{Action: func(_ Options) Result { return Result{OK: true} }}) + 
c.Command("deploy/to/homelab", Command{Action: func(_ Options) Result { return Result{OK: true} }}) + + paths := c.Commands() + assert.Contains(t, paths, "deploy") + assert.Contains(t, paths, "serve") + assert.Contains(t, paths, "deploy/to/homelab") + assert.Contains(t, paths, "deploy/to") +} + +// --- I18n Key Derivation --- + +func TestCommand_I18nKey_Good(t *testing.T) { + c := New().Value.(*Core) + c.Command("deploy/to/homelab", Command{}) + cmd := c.Command("deploy/to/homelab").Value.(*Command) + assert.Equal(t, "cmd.deploy.to.homelab.description", cmd.I18nKey()) +} + +func TestCommand_I18nKey_Custom_Good(t *testing.T) { + c := New().Value.(*Core) + c.Command("deploy", Command{Description: "custom.deploy.key"}) + cmd := c.Command("deploy").Value.(*Command) + assert.Equal(t, "custom.deploy.key", cmd.I18nKey()) +} + +func TestCommand_I18nKey_Simple_Good(t *testing.T) { + c := New().Value.(*Core) + c.Command("serve", Command{}) + cmd := c.Command("serve").Value.(*Command) + assert.Equal(t, "cmd.serve.description", cmd.I18nKey()) +} + +// --- Lifecycle --- + +func TestCommand_Lifecycle_NoImpl_Good(t *testing.T) { + c := New().Value.(*Core) + c.Command("serve", Command{Action: func(_ Options) Result { + return Result{Value: "running", OK: true} + }}) + cmd := c.Command("serve").Value.(*Command) + + r := cmd.Start(Options{}) + assert.True(t, r.OK) + assert.Equal(t, "running", r.Value) + + assert.False(t, cmd.Stop().OK) + assert.False(t, cmd.Restart().OK) + assert.False(t, cmd.Reload().OK) + assert.False(t, cmd.Signal("HUP").OK) +} + +// --- Lifecycle with Implementation --- + +type testLifecycle struct { + started bool + stopped bool + restarted bool + reloaded bool + signalled string +} + +func (l *testLifecycle) Start(opts Options) Result { + l.started = true + return Result{Value: "started", OK: true} +} +func (l *testLifecycle) Stop() Result { + l.stopped = true + return Result{OK: true} +} +func (l *testLifecycle) Restart() Result { + l.restarted = true + 
return Result{OK: true} +} +func (l *testLifecycle) Reload() Result { + l.reloaded = true + return Result{OK: true} +} +func (l *testLifecycle) Signal(sig string) Result { + l.signalled = sig + return Result{Value: sig, OK: true} +} + +func TestCommand_Lifecycle_WithImpl_Good(t *testing.T) { + c := New().Value.(*Core) + lc := &testLifecycle{} + c.Command("daemon", Command{Lifecycle: lc}) + cmd := c.Command("daemon").Value.(*Command) + + r := cmd.Start(Options{}) + assert.True(t, r.OK) + assert.True(t, lc.started) + + assert.True(t, cmd.Stop().OK) + assert.True(t, lc.stopped) + + assert.True(t, cmd.Restart().OK) + assert.True(t, lc.restarted) + + assert.True(t, cmd.Reload().OK) + assert.True(t, lc.reloaded) + + r = cmd.Signal("HUP") + assert.True(t, r.OK) + assert.Equal(t, "HUP", lc.signalled) +} + +func TestCommand_Duplicate_Bad(t *testing.T) { + c := New().Value.(*Core) + c.Command("deploy", Command{Action: func(_ Options) Result { return Result{OK: true} }}) + r := c.Command("deploy", Command{Action: func(_ Options) Result { return Result{OK: true} }}) + assert.False(t, r.OK) +} + +func TestCommand_InvalidPath_Bad(t *testing.T) { + c := New().Value.(*Core) + assert.False(t, c.Command("/leading", Command{}).OK) + assert.False(t, c.Command("trailing/", Command{}).OK) + assert.False(t, c.Command("double//slash", Command{}).OK) +} + +// --- Cli Run with Lifecycle --- + +func TestCli_Run_Lifecycle_Good(t *testing.T) { + c := New().Value.(*Core) + lc := &testLifecycle{} + c.Command("serve", Command{Lifecycle: lc}) + r := c.Cli().Run("serve") + assert.True(t, r.OK) + assert.True(t, lc.started) +} + +func TestCli_Run_NoActionNoLifecycle_Bad(t *testing.T) { + c := New().Value.(*Core) + c.Command("empty", Command{}) + r := c.Cli().Run("empty") + assert.False(t, r.OK) +} + +// --- Empty path --- + +func TestCommand_EmptyPath_Bad(t *testing.T) { + c := New().Value.(*Core) + r := c.Command("", Command{}) + assert.False(t, r.OK) +} diff --git a/config.go b/config.go new file 
mode 100644 index 0000000..395a0f6 --- /dev/null +++ b/config.go @@ -0,0 +1,135 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// Settings, feature flags, and typed configuration for the Core framework. + +package core + +import ( + "sync" +) + +// ConfigVar is a variable that can be set, unset, and queried for its state. +type ConfigVar[T any] struct { + val T + set bool +} + +func (v *ConfigVar[T]) Get() T { return v.val } +func (v *ConfigVar[T]) Set(val T) { v.val = val; v.set = true } +func (v *ConfigVar[T]) IsSet() bool { return v.set } +func (v *ConfigVar[T]) Unset() { + v.set = false + var zero T + v.val = zero +} + +func NewConfigVar[T any](val T) ConfigVar[T] { + return ConfigVar[T]{val: val, set: true} +} + +// ConfigOptions holds configuration data. +type ConfigOptions struct { + Settings map[string]any + Features map[string]bool +} + +func (o *ConfigOptions) init() { + if o.Settings == nil { + o.Settings = make(map[string]any) + } + if o.Features == nil { + o.Features = make(map[string]bool) + } +} + +// Config holds configuration settings and feature flags. +type Config struct { + *ConfigOptions + mu sync.RWMutex +} + +// Set stores a configuration value by key. +func (e *Config) Set(key string, val any) { + e.mu.Lock() + if e.ConfigOptions == nil { + e.ConfigOptions = &ConfigOptions{} + } + e.ConfigOptions.init() + e.Settings[key] = val + e.mu.Unlock() +} + +// Get retrieves a configuration value by key. +func (e *Config) Get(key string) Result { + e.mu.RLock() + defer e.mu.RUnlock() + if e.ConfigOptions == nil || e.Settings == nil { + return Result{} + } + val, ok := e.Settings[key] + if !ok { + return Result{} + } + return Result{val, true} +} + +func (e *Config) String(key string) string { return ConfigGet[string](e, key) } +func (e *Config) Int(key string) int { return ConfigGet[int](e, key) } +func (e *Config) Bool(key string) bool { return ConfigGet[bool](e, key) } + +// ConfigGet retrieves a typed configuration value. 
+func ConfigGet[T any](e *Config, key string) T { + r := e.Get(key) + if !r.OK { + var zero T + return zero + } + typed, _ := r.Value.(T) + return typed +} + +// --- Feature Flags --- + +func (e *Config) Enable(feature string) { + e.mu.Lock() + if e.ConfigOptions == nil { + e.ConfigOptions = &ConfigOptions{} + } + e.ConfigOptions.init() + e.Features[feature] = true + e.mu.Unlock() +} + +func (e *Config) Disable(feature string) { + e.mu.Lock() + if e.ConfigOptions == nil { + e.ConfigOptions = &ConfigOptions{} + } + e.ConfigOptions.init() + e.Features[feature] = false + e.mu.Unlock() +} + +func (e *Config) Enabled(feature string) bool { + e.mu.RLock() + defer e.mu.RUnlock() + if e.ConfigOptions == nil || e.Features == nil { + return false + } + return e.Features[feature] +} + +func (e *Config) EnabledFeatures() []string { + e.mu.RLock() + defer e.mu.RUnlock() + if e.ConfigOptions == nil || e.Features == nil { + return nil + } + var result []string + for k, v := range e.Features { + if v { + result = append(result, k) + } + } + return result +} diff --git a/config_test.go b/config_test.go new file mode 100644 index 0000000..b669e60 --- /dev/null +++ b/config_test.go @@ -0,0 +1,102 @@ +package core_test + +import ( + "testing" + + . 
"dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- Config --- + +func TestConfig_SetGet_Good(t *testing.T) { + c := New().Value.(*Core) + c.Config().Set("api_url", "https://api.lthn.ai") + c.Config().Set("max_agents", 5) + + r := c.Config().Get("api_url") + assert.True(t, r.OK) + assert.Equal(t, "https://api.lthn.ai", r.Value) +} + +func TestConfig_Get_Bad(t *testing.T) { + c := New().Value.(*Core) + r := c.Config().Get("missing") + assert.False(t, r.OK) + assert.Nil(t, r.Value) +} + +func TestConfig_TypedAccessors_Good(t *testing.T) { + c := New().Value.(*Core) + c.Config().Set("url", "https://lthn.ai") + c.Config().Set("port", 8080) + c.Config().Set("debug", true) + + assert.Equal(t, "https://lthn.ai", c.Config().String("url")) + assert.Equal(t, 8080, c.Config().Int("port")) + assert.True(t, c.Config().Bool("debug")) +} + +func TestConfig_TypedAccessors_Bad(t *testing.T) { + c := New().Value.(*Core) + // Missing keys return zero values + assert.Equal(t, "", c.Config().String("missing")) + assert.Equal(t, 0, c.Config().Int("missing")) + assert.False(t, c.Config().Bool("missing")) +} + +// --- Feature Flags --- + +func TestConfig_Features_Good(t *testing.T) { + c := New().Value.(*Core) + c.Config().Enable("dark-mode") + c.Config().Enable("beta") + + assert.True(t, c.Config().Enabled("dark-mode")) + assert.True(t, c.Config().Enabled("beta")) + assert.False(t, c.Config().Enabled("missing")) +} + +func TestConfig_Features_Disable_Good(t *testing.T) { + c := New().Value.(*Core) + c.Config().Enable("feature") + assert.True(t, c.Config().Enabled("feature")) + + c.Config().Disable("feature") + assert.False(t, c.Config().Enabled("feature")) +} + +func TestConfig_Features_CaseSensitive(t *testing.T) { + c := New().Value.(*Core) + c.Config().Enable("Feature") + assert.True(t, c.Config().Enabled("Feature")) + assert.False(t, c.Config().Enabled("feature")) +} + +func TestConfig_EnabledFeatures_Good(t *testing.T) { + c := New().Value.(*Core) + 
c.Config().Enable("a") + c.Config().Enable("b") + c.Config().Enable("c") + c.Config().Disable("b") + + features := c.Config().EnabledFeatures() + assert.Contains(t, features, "a") + assert.Contains(t, features, "c") + assert.NotContains(t, features, "b") +} + +// --- ConfigVar --- + +func TestConfigVar_Good(t *testing.T) { + v := NewConfigVar("hello") + assert.True(t, v.IsSet()) + assert.Equal(t, "hello", v.Get()) + + v.Set("world") + assert.Equal(t, "world", v.Get()) + + v.Unset() + assert.False(t, v.IsSet()) + assert.Equal(t, "", v.Get()) +} diff --git a/contract.go b/contract.go new file mode 100644 index 0000000..ddd9904 --- /dev/null +++ b/contract.go @@ -0,0 +1,157 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// Contracts, options, and type definitions for the Core framework. + +package core + +import ( + "context" +) + +// Message is the type for IPC broadcasts (fire-and-forget). +type Message any + +// Query is the type for read-only IPC requests. +type Query any + +// Task is the type for IPC requests that perform side effects. +type Task any + +// TaskWithIdentifier is an optional interface for tasks that need to know their assigned identifier. +type TaskWithIdentifier interface { + Task + SetTaskIdentifier(id string) + GetTaskIdentifier() string +} + +// QueryHandler handles Query requests. Returns Result{Value, OK}. +type QueryHandler func(*Core, Query) Result + +// TaskHandler handles Task requests. Returns Result{Value, OK}. +type TaskHandler func(*Core, Task) Result + +// Startable is implemented by services that need startup initialisation. +type Startable interface { + OnStartup(ctx context.Context) error +} + +// Stoppable is implemented by services that need shutdown cleanup. 
+type Stoppable interface { + OnShutdown(ctx context.Context) error +} + +// --- Action Messages --- + +type ActionServiceStartup struct{} +type ActionServiceShutdown struct{} + +type ActionTaskStarted struct { + TaskIdentifier string + Task Task +} + +type ActionTaskProgress struct { + TaskIdentifier string + Task Task + Progress float64 + Message string +} + +type ActionTaskCompleted struct { + TaskIdentifier string + Task Task + Result any + Error error +} + +// --- Constructor --- + +// CoreOption is a functional option applied during Core construction. +// Returns Result — if !OK, New() stops and returns the error. +// +// core.New( +// core.WithService(agentic.Register), +// core.WithService(monitor.Register), +// core.WithServiceLock(), +// ) +type CoreOption func(*Core) Result + +// New initialises a Core instance by applying options in order. +// Services registered here form the application conclave — they share +// IPC access and participate in the lifecycle (ServiceStartup/ServiceShutdown). +// +// r := core.New( +// core.WithOptions(core.Options{{Key: "name", Value: "myapp"}}), +// core.WithService(auth.Register), +// core.WithServiceLock(), +// ) +// if !r.OK { log.Fatal(r.Value) } +// c := r.Value.(*Core) +func New(opts ...CoreOption) Result { + c := &Core{ + app: &App{}, + data: &Data{}, + drive: &Drive{}, + fs: &Fs{root: "/"}, + config: &Config{ConfigOptions: &ConfigOptions{}}, + error: &ErrorPanic{}, + log: &ErrorLog{log: Default()}, + lock: &Lock{}, + ipc: &Ipc{}, + info: systemInfo, + i18n: &I18n{}, + services: &serviceRegistry{services: make(map[string]*Service)}, + commands: &commandRegistry{commands: make(map[string]*Command)}, + } + c.context, c.cancel = context.WithCancel(context.Background()) + c.cli = &Cli{core: c} + + for _, opt := range opts { + if r := opt(c); !r.OK { + return r + } + } + + return Result{c, true} +} + +// WithOptions applies key-value configuration to Core. 
+// +// core.WithOptions(core.Options{{Key: "name", Value: "myapp"}}) +func WithOptions(opts Options) CoreOption { + return func(c *Core) Result { + c.options = &opts + if name := opts.String("name"); name != "" { + c.app.Name = name + } + return Result{OK: true} + } +} + +// WithService registers a service via its factory function. +// The factory receives *Core so the service can wire IPC handlers +// and access other subsystems during construction. +// Service name is auto-discovered from the package path. +// If the service implements HandleIPCEvents, it is auto-registered. +// +// core.WithService(agentic.Register) +// core.WithService(display.Register(nil)) +func WithService(factory func(*Core) Result) CoreOption { + return func(c *Core) Result { + return factory(c) + } +} + +// WithServiceLock prevents further service registration after construction. +// +// core.New( +// core.WithService(auth.Register), +// core.WithServiceLock(), +// ) +func WithServiceLock() CoreOption { + return func(c *Core) Result { + c.LockEnable() + c.LockApply() + return Result{OK: true} + } +} diff --git a/core.go b/core.go new file mode 100644 index 0000000..fb9c5d9 --- /dev/null +++ b/core.go @@ -0,0 +1,83 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// Package core is a dependency injection and service lifecycle framework for Go. +// This file defines the Core struct, accessors, and IPC/error wrappers. + +package core + +import ( + "context" + "sync" + "sync/atomic" +) + +// --- Core Struct --- + +// Core is the central application object that manages services, assets, and communication. 
+type Core struct { + options *Options // c.Options() — Input configuration used to create this Core + app *App // c.App() — Application identity + optional GUI runtime + data *Data // c.Data() — Embedded/stored content from packages + drive *Drive // c.Drive() — Resource handle registry (transports) + fs *Fs // c.Fs() — Local filesystem I/O (sandboxable) + config *Config // c.Config() — Configuration, settings, feature flags + error *ErrorPanic // c.Error() — Panic recovery and crash reporting + log *ErrorLog // c.Log() — Structured logging + error wrapping + cli *Cli // c.Cli() — CLI surface layer + commands *commandRegistry // c.Command("path") — Command tree + services *serviceRegistry // c.Service("name") — Service registry + lock *Lock // c.Lock("name") — Named mutexes + ipc *Ipc // c.IPC() — Message bus for IPC + info *SysInfo // c.Env("key") — Read-only system/environment information + i18n *I18n // c.I18n() — Internationalisation and locale collection + + context context.Context + cancel context.CancelFunc + taskIDCounter atomic.Uint64 + waitGroup sync.WaitGroup + shutdown atomic.Bool +} + +// --- Accessors --- + +func (c *Core) Options() *Options { return c.options } +func (c *Core) App() *App { return c.app } +func (c *Core) Data() *Data { return c.data } +func (c *Core) Drive() *Drive { return c.drive } +func (c *Core) Embed() Result { return c.data.Get("app") } // legacy — use Data() +func (c *Core) Fs() *Fs { return c.fs } +func (c *Core) Config() *Config { return c.config } +func (c *Core) Error() *ErrorPanic { return c.error } +func (c *Core) Log() *ErrorLog { return c.log } +func (c *Core) Cli() *Cli { return c.cli } +func (c *Core) IPC() *Ipc { return c.ipc } +func (c *Core) I18n() *I18n { return c.i18n } +func (c *Core) Env(key string) string { return Env(key) } +func (c *Core) Context() context.Context { return c.context } +func (c *Core) Core() *Core { return c } + +// --- IPC (uppercase aliases) --- + +func (c *Core) ACTION(msg Message) Result 
{ return c.Action(msg) } +func (c *Core) QUERY(q Query) Result { return c.Query(q) } +func (c *Core) QUERYALL(q Query) Result { return c.QueryAll(q) } +func (c *Core) PERFORM(t Task) Result { return c.Perform(t) } + +// --- Error+Log --- + +// LogError logs an error and returns the Result from ErrorLog. +func (c *Core) LogError(err error, op, msg string) Result { + return c.log.Error(err, op, msg) +} + +// LogWarn logs a warning and returns the Result from ErrorLog. +func (c *Core) LogWarn(err error, op, msg string) Result { + return c.log.Warn(err, op, msg) +} + +// Must logs and panics if err is not nil. +func (c *Core) Must(err error, op, msg string) { + c.log.Must(err, op, msg) +} + +// --- Global Instance --- diff --git a/core_test.go b/core_test.go new file mode 100644 index 0000000..4cf6c29 --- /dev/null +++ b/core_test.go @@ -0,0 +1,135 @@ +package core_test + +import ( + "context" + "testing" + + . "dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- New --- + +func TestNew_Good(t *testing.T) { + c := New().Value.(*Core) + assert.NotNil(t, c) +} + +func TestNew_WithOptions_Good(t *testing.T) { + c := New(WithOptions(Options{{Key: "name", Value: "myapp"}})).Value.(*Core) + assert.NotNil(t, c) + assert.Equal(t, "myapp", c.App().Name) +} + +func TestNew_WithOptions_Bad(t *testing.T) { + // Empty options — should still create a valid Core + c := New(WithOptions(Options{})).Value.(*Core) + assert.NotNil(t, c) +} + +func TestNew_WithService_Good(t *testing.T) { + started := false + r := New( + WithOptions(Options{{Key: "name", Value: "myapp"}}), + WithService(func(c *Core) Result { + c.Service("test", Service{ + OnStart: func() Result { started = true; return Result{OK: true} }, + }) + return Result{OK: true} + }), + ) + assert.True(t, r.OK) + c := r.Value.(*Core) + + svc := c.Service("test") + assert.True(t, svc.OK) + + c.ServiceStartup(context.Background(), nil) + assert.True(t, started) +} + +func TestNew_WithServiceLock_Good(t *testing.T) 
{ + r := New( + WithService(func(c *Core) Result { + c.Service("allowed", Service{}) + return Result{OK: true} + }), + WithServiceLock(), + ) + assert.True(t, r.OK) + c := r.Value.(*Core) + + // Registration after lock should fail + reg := c.Service("blocked", Service{}) + assert.False(t, reg.OK) +} + +// --- Accessors --- + +func TestAccessors_Good(t *testing.T) { + c := New().Value.(*Core) + assert.NotNil(t, c.App()) + assert.NotNil(t, c.Data()) + assert.NotNil(t, c.Drive()) + assert.NotNil(t, c.Fs()) + assert.NotNil(t, c.Config()) + assert.NotNil(t, c.Error()) + assert.NotNil(t, c.Log()) + assert.NotNil(t, c.Cli()) + assert.NotNil(t, c.IPC()) + assert.NotNil(t, c.I18n()) + assert.Equal(t, c, c.Core()) +} + +func TestOptions_Accessor_Good(t *testing.T) { + c := New(WithOptions(Options{ + {Key: "name", Value: "testapp"}, + {Key: "port", Value: 8080}, + {Key: "debug", Value: true}, + })).Value.(*Core) + opts := c.Options() + assert.NotNil(t, opts) + assert.Equal(t, "testapp", opts.String("name")) + assert.Equal(t, 8080, opts.Int("port")) + assert.True(t, opts.Bool("debug")) +} + +func TestOptions_Accessor_Nil(t *testing.T) { + c := New().Value.(*Core) + // No options passed — Options() returns nil + assert.Nil(t, c.Options()) +} + +// --- Core Error/Log Helpers --- + +func TestCore_LogError_Good(t *testing.T) { + c := New().Value.(*Core) + cause := assert.AnError + r := c.LogError(cause, "test.Operation", "something broke") + assert.False(t, r.OK) + err, ok := r.Value.(error) + assert.True(t, ok) + assert.ErrorIs(t, err, cause) +} + +func TestCore_LogWarn_Good(t *testing.T) { + c := New().Value.(*Core) + r := c.LogWarn(assert.AnError, "test.Operation", "heads up") + assert.False(t, r.OK) + _, ok := r.Value.(error) + assert.True(t, ok) +} + +func TestCore_Must_Ugly(t *testing.T) { + c := New().Value.(*Core) + assert.Panics(t, func() { + c.Must(assert.AnError, "test.Operation", "fatal") + }) +} + +func TestCore_Must_Nil_Good(t *testing.T) { + c := New().Value.(*Core) 
+ assert.NotPanics(t, func() { + c.Must(nil, "test.Operation", "no error") + }) +} diff --git a/data.go b/data.go new file mode 100644 index 0000000..3fa5d7b --- /dev/null +++ b/data.go @@ -0,0 +1,202 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// Data is the embedded/stored content system for core packages. +// Packages mount their embedded content here and other packages +// read from it by path. +// +// Mount a package's assets: +// +// c.Data().New(core.Options{ +// {Key: "name", Value: "brain"}, +// {Key: "source", Value: brainFS}, +// {Key: "path", Value: "prompts"}, +// }) +// +// Read from any mounted path: +// +// content := c.Data().ReadString("brain/coding.md") +// entries := c.Data().List("agent/flow") +// +// Extract a template directory: +// +// c.Data().Extract("agent/workspace/default", "/tmp/ws", data) +package core + +import ( + "io/fs" + "path/filepath" + "sync" +) + +// Data manages mounted embedded filesystems from core packages. +type Data struct { + mounts map[string]*Embed + mu sync.RWMutex +} + +// New registers an embedded filesystem under a named prefix. +// +// c.Data().New(core.Options{ +// {Key: "name", Value: "brain"}, +// {Key: "source", Value: brainFS}, +// {Key: "path", Value: "prompts"}, +// }) +func (d *Data) New(opts Options) Result { + name := opts.String("name") + if name == "" { + return Result{} + } + + r := opts.Get("source") + if !r.OK { + return r + } + + fsys, ok := r.Value.(fs.FS) + if !ok { + return Result{E("data.New", "source is not fs.FS", nil), false} + } + + path := opts.String("path") + if path == "" { + path = "." + } + + d.mu.Lock() + defer d.mu.Unlock() + + if d.mounts == nil { + d.mounts = make(map[string]*Embed) + } + + mr := Mount(fsys, path) + if !mr.OK { + return mr + } + + emb := mr.Value.(*Embed) + d.mounts[name] = emb + return Result{emb, true} +} + +// Get returns the Embed for a named mount point. 
+// +// r := c.Data().Get("brain") +// if r.OK { emb := r.Value.(*Embed) } +func (d *Data) Get(name string) Result { + d.mu.RLock() + defer d.mu.RUnlock() + if d.mounts == nil { + return Result{} + } + emb, ok := d.mounts[name] + if !ok { + return Result{} + } + return Result{emb, true} +} + +// resolve splits a path like "brain/coding.md" into mount name + relative path. +func (d *Data) resolve(path string) (*Embed, string) { + d.mu.RLock() + defer d.mu.RUnlock() + + parts := SplitN(path, "/", 2) + if len(parts) < 2 { + return nil, "" + } + if d.mounts == nil { + return nil, "" + } + emb := d.mounts[parts[0]] + return emb, parts[1] +} + +// ReadFile reads a file by full path. +// +// r := c.Data().ReadFile("brain/prompts/coding.md") +// if r.OK { data := r.Value.([]byte) } +func (d *Data) ReadFile(path string) Result { + emb, rel := d.resolve(path) + if emb == nil { + return Result{} + } + return emb.ReadFile(rel) +} + +// ReadString reads a file as a string. +// +// r := c.Data().ReadString("agent/flow/deploy/to/homelab.yaml") +// if r.OK { content := r.Value.(string) } +func (d *Data) ReadString(path string) Result { + r := d.ReadFile(path) + if !r.OK { + return r + } + return Result{string(r.Value.([]byte)), true} +} + +// List returns directory entries at a path. +// +// r := c.Data().List("agent/persona/code") +// if r.OK { entries := r.Value.([]fs.DirEntry) } +func (d *Data) List(path string) Result { + emb, rel := d.resolve(path) + if emb == nil { + return Result{} + } + r := emb.ReadDir(rel) + if !r.OK { + return r + } + return Result{r.Value, true} +} + +// ListNames returns filenames (without extensions) at a path. 
+// +// r := c.Data().ListNames("agent/flow") +// if r.OK { names := r.Value.([]string) } +func (d *Data) ListNames(path string) Result { + r := d.List(path) + if !r.OK { + return r + } + entries := r.Value.([]fs.DirEntry) + var names []string + for _, e := range entries { + name := e.Name() + if !e.IsDir() { + name = TrimSuffix(name, filepath.Ext(name)) + } + names = append(names, name) + } + return Result{names, true} +} + +// Extract copies a template directory to targetDir. +// +// r := c.Data().Extract("agent/workspace/default", "/tmp/ws", templateData) +func (d *Data) Extract(path, targetDir string, templateData any) Result { + emb, rel := d.resolve(path) + if emb == nil { + return Result{} + } + r := emb.Sub(rel) + if !r.OK { + return r + } + return Extract(r.Value.(*Embed).FS(), targetDir, templateData) +} + +// Mounts returns the names of all mounted content. +// +// names := c.Data().Mounts() +func (d *Data) Mounts() []string { + d.mu.RLock() + defer d.mu.RUnlock() + var names []string + for k := range d.mounts { + names = append(names, k) + } + return names +} diff --git a/data_test.go b/data_test.go new file mode 100644 index 0000000..81ade81 --- /dev/null +++ b/data_test.go @@ -0,0 +1,130 @@ +package core_test + +import ( + "embed" + "io" + "testing" + + . 
"dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +//go:embed testdata +var testFS embed.FS + +// --- Data (Embedded Content Mounts) --- + +func TestData_New_Good(t *testing.T) { + c := New().Value.(*Core) + r := c.Data().New(Options{ + {Key: "name", Value: "test"}, + {Key: "source", Value: testFS}, + {Key: "path", Value: "testdata"}, + }) + assert.True(t, r.OK) + assert.NotNil(t, r.Value) +} + +func TestData_New_Bad(t *testing.T) { + c := New().Value.(*Core) + + r := c.Data().New(Options{{Key: "source", Value: testFS}}) + assert.False(t, r.OK) + + r = c.Data().New(Options{{Key: "name", Value: "test"}}) + assert.False(t, r.OK) + + r = c.Data().New(Options{{Key: "name", Value: "test"}, {Key: "source", Value: "not-an-fs"}}) + assert.False(t, r.OK) +} + +func TestData_ReadString_Good(t *testing.T) { + c := New().Value.(*Core) + c.Data().New(Options{{Key: "name", Value: "app"}, {Key: "source", Value: testFS}, {Key: "path", Value: "testdata"}}) + r := c.Data().ReadString("app/test.txt") + assert.True(t, r.OK) + assert.Equal(t, "hello from testdata\n", r.Value.(string)) +} + +func TestData_ReadString_Bad(t *testing.T) { + c := New().Value.(*Core) + r := c.Data().ReadString("nonexistent/file.txt") + assert.False(t, r.OK) +} + +func TestData_ReadFile_Good(t *testing.T) { + c := New().Value.(*Core) + c.Data().New(Options{{Key: "name", Value: "app"}, {Key: "source", Value: testFS}, {Key: "path", Value: "testdata"}}) + r := c.Data().ReadFile("app/test.txt") + assert.True(t, r.OK) + assert.Equal(t, "hello from testdata\n", string(r.Value.([]byte))) +} + +func TestData_Get_Good(t *testing.T) { + c := New().Value.(*Core) + c.Data().New(Options{{Key: "name", Value: "brain"}, {Key: "source", Value: testFS}, {Key: "path", Value: "testdata"}}) + gr := c.Data().Get("brain") + assert.True(t, gr.OK) + emb := gr.Value.(*Embed) + + r := emb.Open("test.txt") + assert.True(t, r.OK) + file := r.Value.(io.ReadCloser) + defer file.Close() + content, _ := io.ReadAll(file) + 
assert.Equal(t, "hello from testdata\n", string(content)) +} + +func TestData_Get_Bad(t *testing.T) { + c := New().Value.(*Core) + r := c.Data().Get("nonexistent") + assert.False(t, r.OK) +} + +func TestData_Mounts_Good(t *testing.T) { + c := New().Value.(*Core) + c.Data().New(Options{{Key: "name", Value: "a"}, {Key: "source", Value: testFS}, {Key: "path", Value: "testdata"}}) + c.Data().New(Options{{Key: "name", Value: "b"}, {Key: "source", Value: testFS}, {Key: "path", Value: "testdata"}}) + mounts := c.Data().Mounts() + assert.Len(t, mounts, 2) +} + +func TestEmbed_Legacy_Good(t *testing.T) { + c := New().Value.(*Core) + c.Data().New(Options{{Key: "name", Value: "app"}, {Key: "source", Value: testFS}, {Key: "path", Value: "testdata"}}) + assert.NotNil(t, c.Embed()) +} + +func TestData_List_Good(t *testing.T) { + c := New().Value.(*Core) + c.Data().New(Options{{Key: "name", Value: "app"}, {Key: "source", Value: testFS}, {Key: "path", Value: "."}}) + r := c.Data().List("app/testdata") + assert.True(t, r.OK) +} + +func TestData_List_Bad(t *testing.T) { + c := New().Value.(*Core) + r := c.Data().List("nonexistent/path") + assert.False(t, r.OK) +} + +func TestData_ListNames_Good(t *testing.T) { + c := New().Value.(*Core) + c.Data().New(Options{{Key: "name", Value: "app"}, {Key: "source", Value: testFS}, {Key: "path", Value: "."}}) + r := c.Data().ListNames("app/testdata") + assert.True(t, r.OK) + assert.Contains(t, r.Value.([]string), "test") +} + +func TestData_Extract_Good(t *testing.T) { + c := New().Value.(*Core) + c.Data().New(Options{{Key: "name", Value: "app"}, {Key: "source", Value: testFS}, {Key: "path", Value: "."}}) + r := c.Data().Extract("app/testdata", t.TempDir(), nil) + assert.True(t, r.OK) +} + +func TestData_Extract_Bad(t *testing.T) { + c := New().Value.(*Core) + r := c.Data().Extract("nonexistent/path", t.TempDir(), nil) + assert.False(t, r.OK) +} diff --git a/docs/cmd/ai/example.md b/docs/cmd/ai/example.md deleted file mode 100644 index 
b115b09..0000000 --- a/docs/cmd/ai/example.md +++ /dev/null @@ -1,100 +0,0 @@ -# AI Examples - -## Workflow Example - -Complete task management workflow: - -```bash -# 1. List available tasks -core ai tasks --status pending - -# 2. Auto-select and claim a task -core ai task --auto --claim - -# 3. Work on the task... - -# 4. Update progress -core ai task:update abc123 --progress 75 - -# 5. Commit with task reference -core ai task:commit abc123 -m 'implement feature' - -# 6. Create PR -core ai task:pr abc123 - -# 7. Mark complete -core ai task:complete abc123 --output 'Feature implemented and PR created' -``` - -## Task Filtering - -```bash -# By status -core ai tasks --status pending -core ai tasks --status in_progress - -# By priority -core ai tasks --priority critical -core ai tasks --priority high - -# By labels -core ai tasks --labels bug,urgent - -# Combined filters -core ai tasks --status pending --priority high --labels bug -``` - -## Task Updates - -```bash -# Change status -core ai task:update abc123 --status in_progress -core ai task:update abc123 --status blocked - -# Update progress -core ai task:update abc123 --progress 25 -core ai task:update abc123 --progress 50 --notes 'Halfway done' -core ai task:update abc123 --progress 100 -``` - -## Git Integration - -```bash -# Commit with task reference -core ai task:commit abc123 -m 'add authentication' - -# With scope -core ai task:commit abc123 -m 'fix login' --scope auth - -# Commit and push -core ai task:commit abc123 -m 'complete feature' --push - -# Create PR -core ai task:pr abc123 - -# Draft PR -core ai task:pr abc123 --draft - -# PR with labels -core ai task:pr abc123 --labels 'enhancement,ready-for-review' - -# PR to different base -core ai task:pr abc123 --base develop -``` - -## Configuration - -### Environment Variables - -```env -AGENTIC_TOKEN=your-api-token -AGENTIC_BASE_URL=https://agentic.example.com -``` - -### ~/.core/agentic.yaml - -```yaml -token: your-api-token -base_url: 
https://agentic.example.com -default_project: my-project -``` diff --git a/docs/cmd/ai/index.md b/docs/cmd/ai/index.md deleted file mode 100644 index f6c49be..0000000 --- a/docs/cmd/ai/index.md +++ /dev/null @@ -1,262 +0,0 @@ -# core ai - -AI agent task management and Claude Code integration. - -## Task Management Commands - -| Command | Description | -|---------|-------------| -| `tasks` | List available tasks from core-agentic | -| `task` | View task details or auto-select | -| `task:update` | Update task status or progress | -| `task:complete` | Mark task as completed or failed | -| `task:commit` | Create git commit with task reference | -| `task:pr` | Create GitHub PR linked to task | - -## Claude Integration - -| Command | Description | -|---------|-------------| -| `claude run` | Run Claude Code in current directory | -| `claude config` | Manage Claude configuration | - ---- - -## Configuration - -Task commands load configuration from: -1. Environment variables (`AGENTIC_TOKEN`, `AGENTIC_BASE_URL`) -2. `.env` file in current directory -3. `~/.core/agentic.yaml` - ---- - -## ai tasks - -List available tasks from core-agentic. - -```bash -core ai tasks [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--status` | Filter by status (`pending`, `in_progress`, `completed`, `blocked`) | -| `--priority` | Filter by priority (`critical`, `high`, `medium`, `low`) | -| `--labels` | Filter by labels (comma-separated) | -| `--project` | Filter by project | -| `--limit` | Max number of tasks to return (default: 20) | - -### Examples - -```bash -# List all pending tasks -core ai tasks - -# Filter by status and priority -core ai tasks --status pending --priority high - -# Filter by labels -core ai tasks --labels bug,urgent -``` - ---- - -## ai task - -View task details or auto-select a task. 
- -```bash -core ai task [task-id] [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--auto` | Auto-select highest priority pending task | -| `--claim` | Claim the task after showing details | -| `--context` | Show gathered context for AI collaboration | - -### Examples - -```bash -# Show task details -core ai task abc123 - -# Show and claim -core ai task abc123 --claim - -# Show with context -core ai task abc123 --context - -# Auto-select highest priority pending task -core ai task --auto -``` - ---- - -## ai task:update - -Update a task's status, progress, or notes. - -```bash -core ai task:update [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--status` | New status (`pending`, `in_progress`, `completed`, `blocked`) | -| `--progress` | Progress percentage (0-100) | -| `--notes` | Notes about the update | - -### Examples - -```bash -# Set task to in progress -core ai task:update abc123 --status in_progress - -# Update progress with notes -core ai task:update abc123 --progress 50 --notes 'Halfway done' -``` - ---- - -## ai task:complete - -Mark a task as completed with optional output and artifacts. - -```bash -core ai task:complete [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--output` | Summary of the completed work | -| `--failed` | Mark the task as failed | -| `--error` | Error message if failed | - -### Examples - -```bash -# Complete successfully -core ai task:complete abc123 --output 'Feature implemented' - -# Mark as failed -core ai task:complete abc123 --failed --error 'Build failed' -``` - ---- - -## ai task:commit - -Create a git commit with a task reference and co-author attribution. 
- -```bash -core ai task:commit [flags] -``` - -Commit message format: -``` -feat(scope): description - -Task: #123 -Co-Authored-By: Claude -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `-m`, `--message` | Commit message (without task reference) | -| `--scope` | Scope for the commit type (e.g., `auth`, `api`, `ui`) | -| `--push` | Push changes after committing | - -### Examples - -```bash -# Commit with message -core ai task:commit abc123 --message 'add user authentication' - -# With scope -core ai task:commit abc123 -m 'fix login bug' --scope auth - -# Commit and push -core ai task:commit abc123 -m 'update docs' --push -``` - ---- - -## ai task:pr - -Create a GitHub pull request linked to a task. - -```bash -core ai task:pr [flags] -``` - -Requires the GitHub CLI (`gh`) to be installed and authenticated. - -### Flags - -| Flag | Description | -|------|-------------| -| `--title` | PR title (defaults to task title) | -| `--base` | Base branch (defaults to main) | -| `--draft` | Create as draft PR | -| `--labels` | Labels to add (comma-separated) | - -### Examples - -```bash -# Create PR with defaults -core ai task:pr abc123 - -# Custom title -core ai task:pr abc123 --title 'Add authentication feature' - -# Draft PR with labels -core ai task:pr abc123 --draft --labels 'enhancement,needs-review' - -# Target different base branch -core ai task:pr abc123 --base develop -``` - ---- - -## ai claude - -Claude Code integration commands. - -### ai claude run - -Run Claude Code in the current directory. - -```bash -core ai claude run -``` - -### ai claude config - -Manage Claude configuration. - -```bash -core ai claude config -``` - ---- - -## Workflow Example - -See [Workflow Example](example.md#workflow-example) for a complete task management workflow. 
- -## See Also - -- [dev](../dev/) - Multi-repo workflow commands -- [Claude Code documentation](https://claude.ai/code) diff --git a/docs/cmd/build/example.md b/docs/cmd/build/example.md deleted file mode 100644 index da2f3b4..0000000 --- a/docs/cmd/build/example.md +++ /dev/null @@ -1,83 +0,0 @@ -# Build Examples - -## Quick Start - -```bash -# Auto-detect and build -core build - -# Build for specific platforms -core build --targets linux/amd64,darwin/arm64 - -# CI mode -core build --ci -``` - -## Configuration - -`.core/build.yaml`: - -```yaml -version: 1 - -project: - name: myapp - binary: myapp - -build: - main: ./cmd/myapp - ldflags: - - -s -w - - -X main.version={{.Version}} - -targets: - - os: linux - arch: amd64 - - os: linux - arch: arm64 - - os: darwin - arch: arm64 -``` - -## Cross-Platform Build - -```bash -core build --targets linux/amd64,linux/arm64,darwin/arm64,windows/amd64 -``` - -Output: -``` -dist/ -├── myapp-linux-amd64.tar.gz -├── myapp-linux-arm64.tar.gz -├── myapp-darwin-arm64.tar.gz -├── myapp-windows-amd64.zip -└── CHECKSUMS.txt -``` - -## Code Signing - -```yaml -sign: - enabled: true - gpg: - key: $GPG_KEY_ID - macos: - identity: "Developer ID Application: Your Name (TEAM_ID)" - notarize: true - apple_id: $APPLE_ID - team_id: $APPLE_TEAM_ID - app_password: $APPLE_APP_PASSWORD -``` - -## Docker Build - -```bash -core build --type docker --image ghcr.io/myorg/myapp -``` - -## Wails Desktop App - -```bash -core build --type wails --targets darwin/arm64,windows/amd64 -``` diff --git a/docs/cmd/build/index.md b/docs/cmd/build/index.md deleted file mode 100644 index 6956e65..0000000 --- a/docs/cmd/build/index.md +++ /dev/null @@ -1,176 +0,0 @@ -# core build - -Build Go, Wails, Docker, and LinuxKit projects with automatic project detection. 
- -## Subcommands - -| Command | Description | -|---------|-------------| -| [sdk](sdk/) | Generate API SDKs from OpenAPI | -| `from-path` | Build from a local directory | -| `pwa` | Build from a live PWA URL | - -## Usage - -```bash -core build [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--type` | Project type: `go`, `wails`, `docker`, `linuxkit`, `taskfile` (auto-detected) | -| `--targets` | Build targets: `linux/amd64,darwin/arm64,windows/amd64` | -| `--output` | Output directory (default: `dist`) | -| `--ci` | CI mode - minimal output with JSON artifact list at the end | -| `--image` | Docker image name (for docker builds) | -| `--config` | Config file path (for linuxkit: YAML config, for docker: Dockerfile) | -| `--format` | Output format for linuxkit (iso-bios, qcow2-bios, raw, vmdk) | -| `--push` | Push Docker image after build (default: false) | -| `--archive` | Create archives (tar.gz for linux/darwin, zip for windows) - default: true | -| `--checksum` | Generate SHA256 checksums and CHECKSUMS.txt - default: true | -| `--no-sign` | Skip all code signing | -| `--notarize` | Enable macOS notarization (requires Apple credentials) | - -## Examples - -### Go Project - -```bash -# Auto-detect and build -core build - -# Build for specific platforms -core build --targets linux/amd64,linux/arm64,darwin/arm64 - -# CI mode -core build --ci -``` - -### Wails Project - -```bash -# Build Wails desktop app -core build --type wails - -# Build for all desktop platforms -core build --type wails --targets darwin/amd64,darwin/arm64,windows/amd64,linux/amd64 -``` - -### Docker Image - -```bash -# Build Docker image -core build --type docker - -# With custom image name -core build --type docker --image ghcr.io/myorg/myapp - -# Build and push to registry -core build --type docker --image ghcr.io/myorg/myapp --push -``` - -### LinuxKit Image - -```bash -# Build LinuxKit ISO -core build --type linuxkit - -# Build with specific format -core build 
--type linuxkit --config linuxkit.yml --format qcow2-bios -``` - -## Project Detection - -Core automatically detects project type based on files: - -| Files | Type | -|-------|------| -| `wails.json` | Wails | -| `go.mod` | Go | -| `Dockerfile` | Docker | -| `Taskfile.yml` | Taskfile | -| `composer.json` | PHP | -| `package.json` | Node | - -## Output - -Build artifacts are placed in `dist/` by default: - -``` -dist/ -├── myapp-linux-amd64.tar.gz -├── myapp-linux-arm64.tar.gz -├── myapp-darwin-amd64.tar.gz -├── myapp-darwin-arm64.tar.gz -├── myapp-windows-amd64.zip -└── CHECKSUMS.txt -``` - -## Configuration - -Optional `.core/build.yaml` - see [Configuration](example.md#configuration) for examples. - -## Code Signing - -Core supports GPG signing for checksums and native code signing for macOS. - -### GPG Signing - -Signs `CHECKSUMS.txt` with a detached ASCII signature (`.asc`): - -```bash -# Build with GPG signing (default if key configured) -core build - -# Skip signing -core build --no-sign -``` - -Users can verify: - -```bash -gpg --verify CHECKSUMS.txt.asc CHECKSUMS.txt -sha256sum -c CHECKSUMS.txt -``` - -### macOS Code Signing - -Signs Darwin binaries with your Developer ID and optionally notarizes with Apple: - -```bash -# Build with codesign (automatic if identity configured) -core build - -# Build with notarization (takes 1-5 minutes) -core build --notarize -``` - -### Environment Variables - -| Variable | Purpose | -|----------|---------| -| `GPG_KEY_ID` | GPG key ID or fingerprint | -| `CODESIGN_IDENTITY` | macOS Developer ID (fallback) | -| `APPLE_ID` | Apple account email | -| `APPLE_TEAM_ID` | Apple Developer Team ID | -| `APPLE_APP_PASSWORD` | App-specific password for notarization | - -## Building from PWAs and Static Sites - -### Build from Local Directory - -Build a desktop app from static web application files: - -```bash -core build from-path --path ./dist -``` - -### Build from Live PWA - -Build a desktop app from a live Progressive Web App 
URL: - -```bash -core build pwa --url https://example.com -``` diff --git a/docs/cmd/build/sdk/example.md b/docs/cmd/build/sdk/example.md deleted file mode 100644 index e832308..0000000 --- a/docs/cmd/build/sdk/example.md +++ /dev/null @@ -1,56 +0,0 @@ -# SDK Build Examples - -## Generate All SDKs - -```bash -core build sdk -``` - -## Specific Language - -```bash -core build sdk --lang typescript -core build sdk --lang php -core build sdk --lang go -``` - -## Custom Spec - -```bash -core build sdk --spec ./api/openapi.yaml -``` - -## With Version - -```bash -core build sdk --version v2.0.0 -``` - -## Preview - -```bash -core build sdk --dry-run -``` - -## Configuration - -`.core/sdk.yaml`: - -```yaml -version: 1 - -spec: ./api/openapi.yaml - -languages: - - name: typescript - output: sdk/typescript - package: "@myorg/api-client" - - - name: php - output: sdk/php - namespace: MyOrg\ApiClient - - - name: go - output: sdk/go - module: github.com/myorg/api-client-go -``` diff --git a/docs/cmd/build/sdk/index.md b/docs/cmd/build/sdk/index.md deleted file mode 100644 index 084c5ef..0000000 --- a/docs/cmd/build/sdk/index.md +++ /dev/null @@ -1,27 +0,0 @@ -# core build sdk - -Generate typed API clients from OpenAPI specifications. Supports TypeScript, Python, Go, and PHP. 
- -## Usage - -```bash -core build sdk [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--spec` | Path to OpenAPI spec file | -| `--lang` | Generate only this language (typescript, python, go, php) | -| `--version` | Version to embed in generated SDKs | -| `--dry-run` | Show what would be generated without writing files | - -## Examples - -```bash -core build sdk # Generate all -core build sdk --lang typescript # TypeScript only -core build sdk --spec ./api.yaml # Custom spec -core build sdk --dry-run # Preview -``` diff --git a/docs/cmd/ci/changelog/example.md b/docs/cmd/ci/changelog/example.md deleted file mode 100644 index 101cad7..0000000 --- a/docs/cmd/ci/changelog/example.md +++ /dev/null @@ -1,36 +0,0 @@ -# CI Changelog Examples - -```bash -core ci changelog -``` - -## Output - -```markdown -## v1.2.0 - -### Features -- Add user authentication (#123) -- Support dark mode (#124) - -### Bug Fixes -- Fix memory leak in worker (#125) - -### Performance -- Optimize database queries (#126) -``` - -## Configuration - -`.core/release.yaml`: - -```yaml -changelog: - include: - - feat - - fix - - perf - exclude: - - chore - - docs -``` diff --git a/docs/cmd/ci/changelog/index.md b/docs/cmd/ci/changelog/index.md deleted file mode 100644 index ffc0712..0000000 --- a/docs/cmd/ci/changelog/index.md +++ /dev/null @@ -1,28 +0,0 @@ -# core ci changelog - -Generate changelog from conventional commits. - -## Usage - -```bash -core ci changelog -``` - -## Output - -Generates markdown changelog from git commits since last tag: - -```markdown -## v1.2.0 - -### Features -- Add user authentication (#123) -- Support dark mode (#124) - -### Bug Fixes -- Fix memory leak in worker (#125) -``` - -## Configuration - -See [configuration.md](../../../configuration.md) for changelog configuration options. 
diff --git a/docs/cmd/ci/example.md b/docs/cmd/ci/example.md deleted file mode 100644 index faf4720..0000000 --- a/docs/cmd/ci/example.md +++ /dev/null @@ -1,90 +0,0 @@ -# CI Examples - -## Quick Start - -```bash -# Build first -core build - -# Preview release -core ci - -# Publish -core ci --we-are-go-for-launch -``` - -## Configuration - -`.core/release.yaml`: - -```yaml -version: 1 - -project: - name: myapp - repository: host-uk/myapp - -publishers: - - type: github -``` - -## Publisher Examples - -### GitHub + Docker - -```yaml -publishers: - - type: github - - - type: docker - registry: ghcr.io - image: host-uk/myapp - platforms: - - linux/amd64 - - linux/arm64 - tags: - - latest - - "{{.Version}}" -``` - -### Full Stack (GitHub + npm + Homebrew) - -```yaml -publishers: - - type: github - - - type: npm - package: "@host-uk/myapp" - access: public - - - type: homebrew - tap: host-uk/homebrew-tap -``` - -### LinuxKit Image - -```yaml -publishers: - - type: linuxkit - config: .core/linuxkit/server.yml - formats: - - iso - - qcow2 - platforms: - - linux/amd64 - - linux/arm64 -``` - -## Changelog Configuration - -```yaml -changelog: - include: - - feat - - fix - - perf - exclude: - - chore - - docs - - test -``` diff --git a/docs/cmd/ci/index.md b/docs/cmd/ci/index.md deleted file mode 100644 index ee2c759..0000000 --- a/docs/cmd/ci/index.md +++ /dev/null @@ -1,79 +0,0 @@ -# core ci - -Publish releases to GitHub, Docker, npm, Homebrew, and more. - -**Safety:** Dry-run by default. Use `--we-are-go-for-launch` to actually publish. 
- -## Subcommands - -| Command | Description | -|---------|-------------| -| [init](init/) | Initialize release config | -| [changelog](changelog/) | Generate changelog | -| [version](version/) | Show determined version | - -## Usage - -```bash -core ci [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--we-are-go-for-launch` | Actually publish (default is dry-run) | -| `--version` | Override version | -| `--draft` | Create as draft release | -| `--prerelease` | Mark as prerelease | - -## Examples - -```bash -# Preview what would be published (safe) -core ci - -# Actually publish -core ci --we-are-go-for-launch - -# Publish as draft -core ci --we-are-go-for-launch --draft - -# Publish as prerelease -core ci --we-are-go-for-launch --prerelease -``` - -## Workflow - -Build and publish are **separated** to prevent accidents: - -```bash -# Step 1: Build artifacts -core build -core build sdk - -# Step 2: Preview (dry-run by default) -core ci - -# Step 3: Publish (explicit flag required) -core ci --we-are-go-for-launch -``` - -## Publishers - -See [Publisher Examples](example.md#publisher-examples) for configuration. - -| Type | Target | -|------|--------| -| `github` | GitHub Releases | -| `docker` | Container registries | -| `linuxkit` | LinuxKit images | -| `npm` | npm registry | -| `homebrew` | Homebrew tap | -| `scoop` | Scoop bucket | -| `aur` | Arch User Repository | -| `chocolatey` | Chocolatey | - -## Changelog - -Auto-generated from conventional commits. See [Changelog Configuration](example.md#changelog-configuration). 
diff --git a/docs/cmd/ci/init/example.md b/docs/cmd/ci/init/example.md deleted file mode 100644 index 8f76ab9..0000000 --- a/docs/cmd/ci/init/example.md +++ /dev/null @@ -1,17 +0,0 @@ -# CI Init Examples - -```bash -core ci init -``` - -Creates `.core/release.yaml`: - -```yaml -version: 1 - -project: - name: myapp - -publishers: - - type: github -``` diff --git a/docs/cmd/ci/init/index.md b/docs/cmd/ci/init/index.md deleted file mode 100644 index 23ba068..0000000 --- a/docs/cmd/ci/init/index.md +++ /dev/null @@ -1,11 +0,0 @@ -# core ci init - -Initialize release configuration. - -## Usage - -```bash -core ci init -``` - -Creates `.core/release.yaml` with default configuration. See [Configuration](../example.md#configuration) for output format. diff --git a/docs/cmd/ci/version/example.md b/docs/cmd/ci/version/example.md deleted file mode 100644 index e669d65..0000000 --- a/docs/cmd/ci/version/example.md +++ /dev/null @@ -1,18 +0,0 @@ -# CI Version Examples - -```bash -core ci version -``` - -## Output - -``` -v1.2.0 -``` - -## Version Resolution - -1. `--version` flag (if provided) -2. Git tag on HEAD -3. Latest git tag + increment -4. `v0.0.1` (no tags) diff --git a/docs/cmd/ci/version/index.md b/docs/cmd/ci/version/index.md deleted file mode 100644 index 7014a34..0000000 --- a/docs/cmd/ci/version/index.md +++ /dev/null @@ -1,21 +0,0 @@ -# core ci version - -Show the determined release version. - -## Usage - -```bash -core ci version -``` - -## Output - -``` -v1.2.0 -``` - -Version is determined from: -1. `--version` flag (if provided) -2. Git tag on HEAD -3. Latest git tag + increment -4. `v0.0.1` (if no tags exist) diff --git a/docs/cmd/dev/ci/index.md b/docs/cmd/dev/ci/index.md deleted file mode 100644 index 0cf8442..0000000 --- a/docs/cmd/dev/ci/index.md +++ /dev/null @@ -1,61 +0,0 @@ -# core dev ci - -Check CI status across all repositories. - -Fetches GitHub Actions workflow status for all repos. Shows latest run status for each repo. 
Requires the `gh` CLI to be installed and authenticated. - -## Usage - -```bash -core dev ci [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to repos.yaml (auto-detected if not specified) | -| `--branch` | Filter by branch (default: main) | -| `--failed` | Show only failed runs | - -## Examples - -```bash -# Check CI status for all repos -core dev ci - -# Check specific branch -core dev ci --branch develop - -# Show only failures -core dev ci --failed -``` - -## Output - -``` -core-php ✓ passing 2m ago -core-tenant ✓ passing 5m ago -core-admin ✗ failed 12m ago -core-api ⏳ running now -core-bio ✓ passing 1h ago -``` - -## Status Icons - -| Symbol | Meaning | -|--------|---------| -| `✓` | Passing | -| `✗` | Failed | -| `⏳` | Running | -| `-` | No runs | - -## Requirements - -- GitHub CLI (`gh`) must be installed -- Must be authenticated: `gh auth login` - -## See Also - -- [issues command](../issues/) - List open issues -- [reviews command](../reviews/) - List PRs needing review diff --git a/docs/cmd/dev/commit/index.md b/docs/cmd/dev/commit/index.md deleted file mode 100644 index 4258fb1..0000000 --- a/docs/cmd/dev/commit/index.md +++ /dev/null @@ -1,46 +0,0 @@ -# core dev commit - -Claude-assisted commits across repositories. - -Uses Claude to create commits for dirty repos. Shows uncommitted changes and invokes Claude to generate commit messages. - -## Usage - -```bash -core dev commit [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to repos.yaml (auto-detected if not specified) | -| `--all` | Commit all dirty repos without prompting | - -## Examples - -```bash -# Interactive commit (prompts for each repo) -core dev commit - -# Commit all dirty repos automatically -core dev commit --all - -# Use specific registry -core dev commit --registry ~/projects/repos.yaml -``` - -## How It Works - -1. Scans all repositories for uncommitted changes -2. 
For each dirty repo: - - Shows the diff - - Invokes Claude to generate a commit message - - Creates the commit with `Co-Authored-By: Claude` -3. Reports success/failure for each repo - -## See Also - -- [health command](../health/) - Check repo status -- [push command](../push/) - Push commits after committing -- [work command](../work/) - Full workflow (status + commit + push) diff --git a/docs/cmd/dev/example.md b/docs/cmd/dev/example.md deleted file mode 100644 index da75b5e..0000000 --- a/docs/cmd/dev/example.md +++ /dev/null @@ -1,203 +0,0 @@ -# Dev Examples - -## Multi-Repo Workflow - -```bash -# Quick status -core dev health - -# Detailed breakdown -core dev health --verbose - -# Full workflow -core dev work - -# Status only -core dev work --status - -# Commit and push -core dev work --commit - -# Commit dirty repos -core dev commit - -# Commit all without prompting -core dev commit --all - -# Push unpushed -core dev push - -# Push without confirmation -core dev push --force - -# Pull behind repos -core dev pull - -# Pull all repos -core dev pull --all -``` - -## GitHub Integration - -```bash -# Open issues -core dev issues - -# Filter by assignee -core dev issues --assignee @me - -# Limit results -core dev issues --limit 5 - -# PRs needing review -core dev reviews - -# All PRs including drafts -core dev reviews --all - -# Filter by author -core dev reviews --author username - -# CI status -core dev ci - -# Only failed runs -core dev ci --failed - -# Specific branch -core dev ci --branch develop -``` - -## Dependency Analysis - -```bash -# What depends on core-php? 
-core dev impact core-php -``` - -## Task Management - -```bash -# List tasks -core ai tasks - -# Filter by status and priority -core ai tasks --status pending --priority high - -# Filter by labels -core ai tasks --labels bug,urgent - -# Show task details -core ai task abc123 - -# Auto-select highest priority task -core ai task --auto - -# Claim a task -core ai task abc123 --claim - -# Update task status -core ai task:update abc123 --status in_progress - -# Add progress notes -core ai task:update abc123 --progress 50 --notes 'Halfway done' - -# Complete a task -core ai task:complete abc123 --output 'Feature implemented' - -# Mark as failed -core ai task:complete abc123 --failed --error 'Build failed' - -# Commit with task reference -core ai task:commit abc123 -m 'add user authentication' - -# Commit with scope and push -core ai task:commit abc123 -m 'fix login bug' --scope auth --push - -# Create PR for task -core ai task:pr abc123 - -# Create draft PR with labels -core ai task:pr abc123 --draft --labels 'enhancement,needs-review' -``` - -## Service API Management - -```bash -# Synchronize public service APIs -core dev sync - -# Or using the api command -core dev api sync -``` - -## Dev Environment - -```bash -# First time setup -core dev install -core dev boot - -# Open shell -core dev shell - -# Mount and serve -core dev serve - -# Run tests -core dev test - -# Sandboxed Claude -core dev claude -``` - -## Configuration - -### repos.yaml - -```yaml -org: host-uk -repos: - core-php: - type: package - description: Foundation framework - core-tenant: - type: package - depends: [core-php] -``` - -### ~/.core/config.yaml - -```yaml -version: 1 - -images: - source: auto # auto | github | registry | cdn - - cdn: - url: https://images.example.com/core-devops - - github: - repo: host-uk/core-images - - registry: - image: ghcr.io/host-uk/core-devops -``` - -### .core/test.yaml - -```yaml -version: 1 - -commands: - - name: unit - run: vendor/bin/pest --parallel - - name: 
types - run: vendor/bin/phpstan analyse - - name: lint - run: vendor/bin/pint --test - -env: - APP_ENV: testing - DB_CONNECTION: sqlite -``` diff --git a/docs/cmd/dev/health/index.md b/docs/cmd/dev/health/index.md deleted file mode 100644 index d104689..0000000 --- a/docs/cmd/dev/health/index.md +++ /dev/null @@ -1,52 +0,0 @@ -# core dev health - -Quick health check across all repositories. - -Shows a summary of repository health: total repos, dirty repos, unpushed commits, etc. - -## Usage - -```bash -core dev health [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to repos.yaml (auto-detected if not specified) | -| `--verbose` | Show detailed breakdown | - -## Examples - -```bash -# Quick health summary -core dev health - -# Detailed breakdown -core dev health --verbose - -# Use specific registry -core dev health --registry ~/projects/repos.yaml -``` - -## Output - -``` -18 repos │ 2 dirty │ 1 ahead │ all synced -``` - -With `--verbose`: - -``` -Repos: 18 -Dirty: 2 (core-php, core-admin) -Ahead: 1 (core-tenant) -Behind: 0 -Synced: ✓ -``` - -## See Also - -- [work command](../work/) - Full workflow (status + commit + push) -- [commit command](../commit/) - Claude-assisted commits diff --git a/docs/cmd/dev/impact/index.md b/docs/cmd/dev/impact/index.md deleted file mode 100644 index ac96e04..0000000 --- a/docs/cmd/dev/impact/index.md +++ /dev/null @@ -1,65 +0,0 @@ -# core dev impact - -Show impact of changing a repository. - -Analyses the dependency graph to show which repos would be affected by changes to the specified repo. 
- -## Usage - -```bash -core dev impact [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to repos.yaml (auto-detected if not specified) | - -## Examples - -```bash -# Show what depends on core-php -core dev impact core-php - -# Show what depends on core-tenant -core dev impact core-tenant -``` - -## Output - -``` -Impact of changes to core-php: - -Direct dependents (5): - core-tenant - core-admin - core-api - core-mcp - core-commerce - -Indirect dependents (12): - core-bio (via core-tenant) - core-social (via core-tenant) - core-analytics (via core-tenant) - core-notify (via core-tenant) - core-trust (via core-tenant) - core-support (via core-tenant) - core-content (via core-tenant) - core-developer (via core-tenant) - core-agentic (via core-mcp) - ... - -Total: 17 repos affected -``` - -## Use Cases - -- Before making breaking changes, see what needs updating -- Plan release order based on dependency graph -- Understand the ripple effect of changes - -## See Also - -- [health command](../health/) - Quick repo status -- [setup command](../../setup/) - Clone repos with dependencies diff --git a/docs/cmd/dev/index.md b/docs/cmd/dev/index.md deleted file mode 100644 index 56a5090..0000000 --- a/docs/cmd/dev/index.md +++ /dev/null @@ -1,388 +0,0 @@ -# core dev - -Multi-repo workflow and portable development environment. 
- -## Multi-Repo Commands - -| Command | Description | -|---------|-------------| -| [work](work/) | Full workflow: status + commit + push | -| `health` | Quick health check across repos | -| `commit` | Claude-assisted commits | -| `push` | Push repos with unpushed commits | -| `pull` | Pull repos that are behind | -| `issues` | List open issues | -| `reviews` | List PRs needing review | -| `ci` | Check CI status | -| `impact` | Show dependency impact | -| `api` | Tools for managing service APIs | -| `sync` | Synchronize public service APIs | - -## Task Management Commands - -> **Note:** Task management commands have moved to [`core ai`](../ai/). - -| Command | Description | -|---------|-------------| -| [`ai tasks`](../ai/) | List available tasks from core-agentic | -| [`ai task`](../ai/) | Show task details or auto-select a task | -| [`ai task:update`](../ai/) | Update task status or progress | -| [`ai task:complete`](../ai/) | Mark a task as completed | -| [`ai task:commit`](../ai/) | Auto-commit changes with task reference | -| [`ai task:pr`](../ai/) | Create a pull request for a task | - -## Dev Environment Commands - -| Command | Description | -|---------|-------------| -| `install` | Download the core-devops image | -| `boot` | Start the environment | -| `stop` | Stop the environment | -| `status` | Show status | -| `shell` | Open shell | -| `serve` | Start dev server | -| `test` | Run tests | -| `claude` | Sandboxed Claude | -| `update` | Update image | - ---- - -## Dev Environment Overview - -Core DevOps provides a sandboxed, immutable development environment based on LinuxKit with 100+ embedded tools. - -## Quick Start - -```bash -# First time setup -core dev install -core dev boot - -# Open shell -core dev shell - -# Or mount current project and serve -core dev serve -``` - -## dev install - -Download the core-devops image for your platform. 
- -```bash -core dev install -``` - -Downloads the platform-specific dev environment image including Go, PHP, Node.js, Python, Docker, and Claude CLI. Downloads are cached at `~/.core/images/`. - -### Examples - -```bash -# Download image (auto-detects platform) -core dev install -``` - -## dev boot - -Start the development environment. - -```bash -core dev boot [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--memory` | Memory allocation in MB (default: 4096) | -| `--cpus` | Number of CPUs (default: 2) | -| `--fresh` | Stop existing and start fresh | - -### Examples - -```bash -# Start with defaults -core dev boot - -# More resources -core dev boot --memory 8192 --cpus 4 - -# Fresh start -core dev boot --fresh -``` - -## dev shell - -Open a shell in the running environment. - -```bash -core dev shell [flags] [-- command] -``` - -Uses SSH by default, or serial console with `--console`. - -### Flags - -| Flag | Description | -|------|-------------| -| `--console` | Use serial console instead of SSH | - -### Examples - -```bash -# SSH into environment -core dev shell - -# Serial console (for debugging) -core dev shell --console - -# Run a command -core dev shell -- ls -la -``` - -## dev serve - -Mount current directory and start the appropriate dev server. - -```bash -core dev serve [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--port` | Port to expose (default: 8000) | -| `--path` | Subdirectory to serve | - -### Auto-Detection - -| Project | Server Command | -|---------|---------------| -| Laravel (`artisan`) | `php artisan octane:start` | -| Node (`package.json` with `dev` script) | `npm run dev` | -| PHP (`composer.json`) | `frankenphp php-server` | -| Other | `python -m http.server` | - -### Examples - -```bash -# Auto-detect and serve -core dev serve - -# Custom port -core dev serve --port 3000 -``` - -## dev test - -Run tests inside the environment. 
- -```bash -core dev test [flags] [-- custom command] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--name` | Run named test command from `.core/test.yaml` | - -### Test Detection - -Core auto-detects the test framework or uses `.core/test.yaml`: - -1. `.core/test.yaml` - Custom config -2. `composer.json` → `composer test` -3. `package.json` → `npm test` -4. `go.mod` → `go test ./...` -5. `pytest.ini` → `pytest` -6. `Taskfile.yaml` → `task test` - -### Examples - -```bash -# Auto-detect and run tests -core dev test - -# Run named test from config -core dev test --name integration - -# Custom command -core dev test -- go test -v ./pkg/... -``` - -### Test Configuration - -Create `.core/test.yaml` for custom test setup - see [Configuration](example.md#configuration) for examples. - -## dev claude - -Start a sandboxed Claude session with your project mounted. - -```bash -core dev claude [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--model` | Model to use (`opus`, `sonnet`) | -| `--no-auth` | Don't forward any auth credentials | -| `--auth` | Selective auth forwarding (`gh`, `anthropic`, `ssh`, `git`) | - -### What Gets Forwarded - -By default, these are forwarded to the sandbox: -- `~/.anthropic/` or `ANTHROPIC_API_KEY` -- `~/.config/gh/` (GitHub CLI auth) -- SSH agent -- Git config (name, email) - -### Examples - -```bash -# Full auth forwarding (default) -core dev claude - -# Use Opus model -core dev claude --model opus - -# Clean sandbox -core dev claude --no-auth - -# Only GitHub and Anthropic auth -core dev claude --auth gh,anthropic -``` - -### Why Use This? 
- -- **Immutable base** - Reset anytime with `core dev boot --fresh` -- **Safe experimentation** - Claude can install packages, make mistakes -- **Host system untouched** - All changes stay in the sandbox -- **Real credentials** - Can still push code, create PRs -- **Full tooling** - 100+ tools available in the image - -## dev status - -Show the current state of the development environment. - -```bash -core dev status -``` - -Output includes: -- Running/stopped state -- Resource usage (CPU, memory) -- Exposed ports -- Mounted directories - -## dev update - -Check for and apply updates. - -```bash -core dev update [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--apply` | Download and apply the update | - -### Examples - -```bash -# Check for updates -core dev update - -# Apply available update -core dev update --apply -``` - -## Embedded Tools - -The core-devops image includes 100+ tools: - -| Category | Tools | -|----------|-------| -| **AI/LLM** | claude, gemini, aider, ollama, llm | -| **VCS** | git, gh, glab, lazygit, delta, git-lfs | -| **Runtimes** | frankenphp, node, bun, deno, go, python3, rustc | -| **Package Mgrs** | composer, npm, pnpm, yarn, pip, uv, cargo | -| **Build** | task, make, just, nx, turbo | -| **Linting** | pint, phpstan, prettier, eslint, biome, golangci-lint, ruff | -| **Testing** | phpunit, pest, vitest, playwright, k6 | -| **Infra** | docker, kubectl, k9s, helm, terraform, ansible | -| **Databases** | sqlite3, mysql, psql, redis-cli, mongosh, usql | -| **HTTP/Net** | curl, httpie, xh, websocat, grpcurl, mkcert, ngrok | -| **Data** | jq, yq, fx, gron, miller, dasel | -| **Security** | age, sops, cosign, trivy, trufflehog, vault | -| **Files** | fd, rg, fzf, bat, eza, tree, zoxide, broot | -| **Editors** | nvim, helix, micro | - -## Configuration - -Global config in `~/.core/config.yaml` - see [Configuration](example.md#configuration) for examples. 
- -## Image Storage - -Images are stored in `~/.core/images/`: - -``` -~/.core/ -├── config.yaml -└── images/ - ├── core-devops-darwin-arm64.qcow2 - ├── core-devops-linux-amd64.qcow2 - └── manifest.json -``` - -## Multi-Repo Commands - -See the [work](work/) page for detailed documentation on multi-repo commands. - -### dev ci - -Check GitHub Actions workflow status across all repos. - -```bash -core dev ci [flags] -``` - -#### Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to `repos.yaml` (auto-detected if not specified) | -| `--branch` | Filter by branch (default: main) | -| `--failed` | Show only failed runs | - -Requires the `gh` CLI to be installed and authenticated. - -### dev api - -Tools for managing service APIs. - -```bash -core dev api sync -``` - -Synchronizes the public service APIs with their internal implementations. - -### dev sync - -Alias for `core dev api sync`. Synchronizes the public service APIs with their internal implementations. - -```bash -core dev sync -``` - -This command scans the `pkg` directory for services and ensures that the top-level public API for each service is in sync with its internal implementation. It automatically generates the necessary Go files with type aliases. - -## See Also - -- [work](work/) - Multi-repo workflow commands (`core dev work`, `core dev health`, etc.) -- [ai](../ai/) - Task management commands (`core ai tasks`, `core ai task`, etc.) diff --git a/docs/cmd/dev/issues/index.md b/docs/cmd/dev/issues/index.md deleted file mode 100644 index 36091eb..0000000 --- a/docs/cmd/dev/issues/index.md +++ /dev/null @@ -1,57 +0,0 @@ -# core dev issues - -List open issues across all repositories. - -Fetches open issues from GitHub for all repos in the registry. Requires the `gh` CLI to be installed and authenticated. 
- -## Usage - -```bash -core dev issues [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to repos.yaml (auto-detected if not specified) | -| `--assignee` | Filter by assignee (use `@me` for yourself) | -| `--limit` | Max issues per repo (default 10) | - -## Examples - -```bash -# List all open issues -core dev issues - -# Show issues assigned to you -core dev issues --assignee @me - -# Limit to 5 issues per repo -core dev issues --limit 5 - -# Filter by specific assignee -core dev issues --assignee username -``` - -## Output - -``` -core-php (3 issues) - #42 Add retry logic to HTTP client bug - #38 Update documentation for v2 API docs - #35 Support custom serializers enhancement - -core-tenant (1 issue) - #12 Workspace isolation bug bug, critical -``` - -## Requirements - -- GitHub CLI (`gh`) must be installed -- Must be authenticated: `gh auth login` - -## See Also - -- [reviews command](../reviews/) - List PRs needing review -- [ci command](../ci/) - Check CI status diff --git a/docs/cmd/dev/pull/index.md b/docs/cmd/dev/pull/index.md deleted file mode 100644 index 1f6f3df..0000000 --- a/docs/cmd/dev/pull/index.md +++ /dev/null @@ -1,47 +0,0 @@ -# core dev pull - -Pull updates across all repositories. - -Pulls updates for all repos. By default only pulls repos that are behind. Use `--all` to pull all repos. 
- -## Usage - -```bash -core dev pull [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to repos.yaml (auto-detected if not specified) | -| `--all` | Pull all repos, not just those behind | - -## Examples - -```bash -# Pull only repos that are behind -core dev pull - -# Pull all repos -core dev pull --all - -# Use specific registry -core dev pull --registry ~/projects/repos.yaml -``` - -## Output - -``` -Pulling 2 repo(s) that are behind: - ✓ core-php (3 commits) - ✓ core-tenant (1 commit) - -Done: 2 pulled -``` - -## See Also - -- [push command](../push/) - Push local commits -- [health command](../health/) - Check sync status -- [work command](../work/) - Full workflow diff --git a/docs/cmd/dev/push/index.md b/docs/cmd/dev/push/index.md deleted file mode 100644 index 0c11195..0000000 --- a/docs/cmd/dev/push/index.md +++ /dev/null @@ -1,52 +0,0 @@ -# core dev push - -Push commits across all repositories. - -Pushes unpushed commits for all repos. Shows repos with commits to push and confirms before pushing. - -## Usage - -```bash -core dev push [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to repos.yaml (auto-detected if not specified) | -| `--force` | Skip confirmation prompt | - -## Examples - -```bash -# Push with confirmation -core dev push - -# Push without confirmation -core dev push --force - -# Use specific registry -core dev push --registry ~/projects/repos.yaml -``` - -## Output - -``` -3 repo(s) with unpushed commits: - core-php: 2 commit(s) - core-admin: 1 commit(s) - core-tenant: 1 commit(s) - -Push all? 
[y/N] y - - ✓ core-php - ✓ core-admin - ✓ core-tenant -``` - -## See Also - -- [commit command](../commit/) - Create commits before pushing -- [pull command](../pull/) - Pull updates from remote -- [work command](../work/) - Full workflow (status + commit + push) diff --git a/docs/cmd/dev/reviews/index.md b/docs/cmd/dev/reviews/index.md deleted file mode 100644 index 44c09ad..0000000 --- a/docs/cmd/dev/reviews/index.md +++ /dev/null @@ -1,61 +0,0 @@ -# core dev reviews - -List PRs needing review across all repositories. - -Fetches open PRs from GitHub for all repos in the registry. Shows review status (approved, changes requested, pending). Requires the `gh` CLI to be installed and authenticated. - -## Usage - -```bash -core dev reviews [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to repos.yaml (auto-detected if not specified) | -| `--all` | Show all PRs including drafts | -| `--author` | Filter by PR author | - -## Examples - -```bash -# List PRs needing review -core dev reviews - -# Include draft PRs -core dev reviews --all - -# Filter by author -core dev reviews --author username -``` - -## Output - -``` -core-php (2 PRs) - #45 feat: Add caching layer ✓ approved @alice - #43 fix: Memory leak in worker ⏳ pending @bob - -core-admin (1 PR) - #28 refactor: Extract components ✗ changes @charlie -``` - -## Review Status - -| Symbol | Meaning | -|--------|---------| -| `✓` | Approved | -| `⏳` | Pending review | -| `✗` | Changes requested | - -## Requirements - -- GitHub CLI (`gh`) must be installed -- Must be authenticated: `gh auth login` - -## See Also - -- [issues command](../issues/) - List open issues -- [ci command](../ci/) - Check CI status diff --git a/docs/cmd/dev/work/example.md b/docs/cmd/dev/work/example.md deleted file mode 100644 index 74db3fb..0000000 --- a/docs/cmd/dev/work/example.md +++ /dev/null @@ -1,33 +0,0 @@ -# Dev Work Examples - -```bash -# Full workflow: status → commit → push -core dev work 
- -# Status only -core dev work --status -``` - -## Output - -``` -┌─────────────┬────────┬──────────┬─────────┐ -│ Repo │ Branch │ Status │ Behind │ -├─────────────┼────────┼──────────┼─────────┤ -│ core-php │ main │ clean │ 0 │ -│ core-tenant │ main │ 2 files │ 0 │ -│ core-admin │ dev │ clean │ 3 │ -└─────────────┴────────┴──────────┴─────────┘ -``` - -## Registry - -```yaml -repos: - - name: core - path: ./core - url: https://github.com/host-uk/core - - name: core-php - path: ./core-php - url: https://github.com/host-uk/core-php -``` diff --git a/docs/cmd/dev/work/index.md b/docs/cmd/dev/work/index.md deleted file mode 100644 index 454fe22..0000000 --- a/docs/cmd/dev/work/index.md +++ /dev/null @@ -1,293 +0,0 @@ -# core dev work - -Multi-repo git operations for managing the host-uk organization. - -## Overview - -The `core dev work` command and related subcommands help manage multiple repositories in the host-uk ecosystem simultaneously. - -## Commands - -| Command | Description | -|---------|-------------| -| `core dev work` | Full workflow: status + commit + push | -| `core dev work --status` | Status table only | -| `core dev work --commit` | Use Claude to commit dirty repos | -| `core dev health` | Quick health check across all repos | -| `core dev commit` | Claude-assisted commits across repos | -| `core dev push` | Push commits across all repos | -| `core dev pull` | Pull updates across all repos | -| `core dev issues` | List open issues across all repos | -| `core dev reviews` | List PRs needing review | -| `core dev ci` | Check CI status across all repos | -| `core dev impact` | Show impact of changing a repo | - -## core dev work - -Manage git status, commits, and pushes across multiple repositories. - -```bash -core dev work [flags] -``` - -Reads `repos.yaml` to discover repositories and their relationships. Shows status, optionally commits with Claude, and pushes changes. 
- -### Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to `repos.yaml` (auto-detected if not specified) | -| `--status` | Show status only, don't push | -| `--commit` | Use Claude to commit dirty repos before pushing | - -### Examples - -```bash -# Full workflow -core dev work - -# Status only -core dev work --status - -# Commit and push -core dev work --commit -``` - -## core dev health - -Quick health check showing summary of repository health across all repos. - -```bash -core dev health [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to `repos.yaml` (auto-detected if not specified) | -| `--verbose` | Show detailed breakdown | - -Output shows: -- Total repos -- Dirty repos -- Unpushed commits -- Repos behind remote - -### Examples - -```bash -# Quick summary -core dev health - -# Detailed breakdown -core dev health --verbose -``` - -## core dev issues - -List open issues across all repositories. - -```bash -core dev issues [flags] -``` - -Fetches open issues from GitHub for all repos in the registry. Requires the `gh` CLI to be installed and authenticated. - -### Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to `repos.yaml` (auto-detected if not specified) | -| `--assignee` | Filter by assignee (use `@me` for yourself) | -| `--limit` | Max issues per repo (default: 10) | - -### Examples - -```bash -# List all open issues -core dev issues - -# Filter by assignee -core dev issues --assignee @me - -# Limit results -core dev issues --limit 5 -``` - -## core dev reviews - -List pull requests needing review across all repos. - -```bash -core dev reviews [flags] -``` - -Fetches open PRs from GitHub for all repos in the registry. Shows review status (approved, changes requested, pending). Requires the `gh` CLI to be installed and authenticated. 
- -### Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to `repos.yaml` (auto-detected if not specified) | -| `--all` | Show all PRs including drafts | -| `--author` | Filter by PR author | - -### Examples - -```bash -# List PRs needing review -core dev reviews - -# Show all PRs including drafts -core dev reviews --all - -# Filter by author -core dev reviews --author username -``` - -## core dev commit - -Create commits across repos with Claude assistance. - -```bash -core dev commit [flags] -``` - -Uses Claude to create commits for dirty repos. Shows uncommitted changes and invokes Claude to generate commit messages. - -### Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to `repos.yaml` (auto-detected if not specified) | -| `--all` | Commit all dirty repos without prompting | - -### Examples - -```bash -# Commit with prompts -core dev commit - -# Commit all automatically -core dev commit --all -``` - -## core dev push - -Push commits across all repos. - -```bash -core dev push [flags] -``` - -Pushes unpushed commits for all repos. Shows repos with commits to push and confirms before pushing. - -### Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to `repos.yaml` (auto-detected if not specified) | -| `--force` | Skip confirmation prompt | - -### Examples - -```bash -# Push with confirmation -core dev push - -# Skip confirmation -core dev push --force -``` - -## core dev pull - -Pull updates across all repos. - -```bash -core dev pull [flags] -``` - -Pulls updates for all repos. By default only pulls repos that are behind. Use `--all` to pull all repos. 
- -### Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to `repos.yaml` (auto-detected if not specified) | -| `--all` | Pull all repos, not just those behind | - -### Examples - -```bash -# Pull repos that are behind -core dev pull - -# Pull all repos -core dev pull --all -``` - -## core dev ci - -Check GitHub Actions workflow status across all repos. - -```bash -core dev ci [flags] -``` - -Fetches GitHub Actions workflow status for all repos. Shows latest run status for each repo. Requires the `gh` CLI to be installed and authenticated. - -### Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to `repos.yaml` (auto-detected if not specified) | -| `--branch` | Filter by branch (default: main) | -| `--failed` | Show only failed runs | - -### Examples - -```bash -# Show CI status for all repos -core dev ci - -# Show only failed runs -core dev ci --failed - -# Check specific branch -core dev ci --branch develop -``` - -## core dev impact - -Show the impact of changing a repository. - -```bash -core dev impact [flags] -``` - -Analyzes the dependency graph to show which repos would be affected by changes to the specified repo. - -### Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to `repos.yaml` (auto-detected if not specified) | - -### Examples - -```bash -# Show impact of changing core-php -core dev impact core-php -``` - -## Registry - -These commands use `repos.yaml` to know which repos to manage. See [repos.yaml](../../../configuration.md#reposyaml) for format. - -Use `core setup` to clone all repos from the registry. 
- -## See Also - -- [setup command](../../setup/) - Clone repos from registry -- [search command](../../pkg/search/) - Find and install repos diff --git a/docs/cmd/docs/example.md b/docs/cmd/docs/example.md deleted file mode 100644 index 7729970..0000000 --- a/docs/cmd/docs/example.md +++ /dev/null @@ -1,14 +0,0 @@ -# Docs Examples - -## List - -```bash -core docs list -``` - -## Sync - -```bash -core docs sync -core docs sync --output ./docs -``` diff --git a/docs/cmd/docs/index.md b/docs/cmd/docs/index.md deleted file mode 100644 index d73ebf0..0000000 --- a/docs/cmd/docs/index.md +++ /dev/null @@ -1,110 +0,0 @@ -# core docs - -Documentation management across repositories. - -## Usage - -```bash -core docs [flags] -``` - -## Commands - -| Command | Description | -|---------|-------------| -| `list` | List documentation across repos | -| `sync` | Sync documentation to output directory | - -## docs list - -Show documentation coverage across all repos. - -```bash -core docs list [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to repos.yaml | - -### Output - -``` -Repo README CLAUDE CHANGELOG docs/ -────────────────────────────────────────────────────────────────────── -core ✓ ✓ — 12 files -core-php ✓ ✓ ✓ 8 files -core-images ✓ — — — - -Coverage: 3 with docs, 0 without -``` - -## docs sync - -Sync documentation from all repos to an output directory. - -```bash -core docs sync [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to repos.yaml | -| `--output` | Output directory (default: ./docs-build) | -| `--dry-run` | Show what would be synced | - -### Output Structure - -``` -docs-build/ -└── packages/ - ├── core/ - │ ├── index.md # from README.md - │ ├── claude.md # from CLAUDE.md - │ ├── changelog.md # from CHANGELOG.md - │ ├── build.md # from docs/build.md - │ └── ... - └── core-php/ - ├── index.md - └── ... 
-``` - -### Example - -```bash -# Preview what will be synced -core docs sync --dry-run - -# Sync to default output -core docs sync - -# Sync to custom directory -core docs sync --output ./site/content -``` - -## What Gets Synced - -For each repo, the following files are collected: - -| Source | Destination | -|--------|-------------| -| `README.md` | `index.md` | -| `CLAUDE.md` | `claude.md` | -| `CHANGELOG.md` | `changelog.md` | -| `docs/*.md` | `*.md` | - -## Integration with core.help - -The synced docs are used to build https://core.help: - -1. Run `core docs sync --output ../core-php/docs/packages` -2. VitePress builds the combined documentation -3. Deploy to core.help - -## See Also - -- [Configuration](../../configuration.md) - Project configuration diff --git a/docs/cmd/doctor/example.md b/docs/cmd/doctor/example.md deleted file mode 100644 index ba94d71..0000000 --- a/docs/cmd/doctor/example.md +++ /dev/null @@ -1,20 +0,0 @@ -# Doctor Examples - -```bash -core doctor -``` - -## Output - -``` -✓ go 1.25.0 -✓ git 2.43.0 -✓ gh 2.40.0 -✓ docker 24.0.7 -✓ task 3.30.0 -✓ golangci-lint 1.55.0 -✗ wails (not installed) -✓ php 8.3.0 -✓ composer 2.6.0 -✓ node 20.10.0 -``` diff --git a/docs/cmd/doctor/index.md b/docs/cmd/doctor/index.md deleted file mode 100644 index 02cc44d..0000000 --- a/docs/cmd/doctor/index.md +++ /dev/null @@ -1,81 +0,0 @@ -# core doctor - -Check your development environment for required tools and configuration. 
- -## Usage - -```bash -core doctor [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--verbose` | Show detailed version information | - -## What It Checks - -### Required Tools - -| Tool | Purpose | -|------|---------| -| `git` | Version control | -| `go` | Go compiler | -| `gh` | GitHub CLI | - -### Optional Tools - -| Tool | Purpose | -|------|---------| -| `node` | Node.js runtime | -| `docker` | Container runtime | -| `wails` | Desktop app framework | -| `qemu` | VM runtime for LinuxKit | -| `gpg` | Code signing | -| `codesign` | macOS signing (macOS only) | - -### Configuration - -- Git user name and email -- GitHub CLI authentication -- Go workspace setup - -## Output - -``` -Core Doctor -=========== - -Required: - [OK] git 2.43.0 - [OK] go 1.23.0 - [OK] gh 2.40.0 - -Optional: - [OK] node 20.10.0 - [OK] docker 24.0.7 - [--] wails (not installed) - [OK] qemu 8.2.0 - [OK] gpg 2.4.3 - [OK] codesign (available) - -Configuration: - [OK] git user.name: Your Name - [OK] git user.email: you@example.com - [OK] gh auth status: Logged in - -All checks passed! 
-``` - -## Exit Codes - -| Code | Meaning | -|------|---------| -| 0 | All required checks passed | -| 1 | One or more required checks failed | - -## See Also - -- [setup command](../setup/) - Clone repos from registry -- [dev](../dev/) - Development environment diff --git a/docs/cmd/go/cov/example.md b/docs/cmd/go/cov/example.md deleted file mode 100644 index 4fdc6c2..0000000 --- a/docs/cmd/go/cov/example.md +++ /dev/null @@ -1,18 +0,0 @@ -# Go Coverage Examples - -```bash -# Summary -core go cov - -# HTML report -core go cov --html - -# Open in browser -core go cov --open - -# Fail if below threshold -core go cov --threshold 80 - -# Specific package -core go cov --pkg ./pkg/release -``` diff --git a/docs/cmd/go/cov/index.md b/docs/cmd/go/cov/index.md deleted file mode 100644 index 3adeca3..0000000 --- a/docs/cmd/go/cov/index.md +++ /dev/null @@ -1,28 +0,0 @@ -# core go cov - -Generate coverage report with thresholds. - -## Usage - -```bash -core go cov [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--pkg` | Package to test (default: `./...`) | -| `--html` | Generate HTML coverage report | -| `--open` | Generate and open HTML report in browser | -| `--threshold` | Minimum coverage percentage (exit 1 if below) | - -## Examples - -```bash -core go cov # Summary -core go cov --html # HTML report -core go cov --open # Open in browser -core go cov --threshold 80 # Fail if < 80% -core go cov --pkg ./pkg/release # Specific package -``` diff --git a/docs/cmd/go/example.md b/docs/cmd/go/example.md deleted file mode 100644 index 51ad71a..0000000 --- a/docs/cmd/go/example.md +++ /dev/null @@ -1,89 +0,0 @@ -# Go Examples - -## Testing - -```bash -# Run all tests -core go test - -# Specific package -core go test --pkg ./pkg/core - -# Specific test -core go test --run TestHash - -# With coverage -core go test --coverage - -# Race detection -core go test --race -``` - -## Coverage - -```bash -# Summary -core go cov - -# HTML report -core go cov 
--html - -# Open in browser -core go cov --open - -# Fail if below threshold -core go cov --threshold 80 -``` - -## Formatting - -```bash -# Check -core go fmt - -# Fix -core go fmt --fix - -# Show diff -core go fmt --diff -``` - -## Linting - -```bash -# Check -core go lint - -# Auto-fix -core go lint --fix -``` - -## Installing - -```bash -# Auto-detect cmd/ -core go install - -# Specific path -core go install ./cmd/myapp - -# Pure Go (no CGO) -core go install --no-cgo -``` - -## Module Management - -```bash -core go mod tidy -core go mod download -core go mod verify -core go mod graph -``` - -## Workspace - -```bash -core go work sync -core go work init -core go work use ./pkg/mymodule -``` diff --git a/docs/cmd/go/fmt/example.md b/docs/cmd/go/fmt/example.md deleted file mode 100644 index 40233e0..0000000 --- a/docs/cmd/go/fmt/example.md +++ /dev/null @@ -1,12 +0,0 @@ -# Go Format Examples - -```bash -# Check only -core go fmt - -# Apply fixes -core go fmt --fix - -# Show diff -core go fmt --diff -``` diff --git a/docs/cmd/go/fmt/index.md b/docs/cmd/go/fmt/index.md deleted file mode 100644 index fe6113e..0000000 --- a/docs/cmd/go/fmt/index.md +++ /dev/null @@ -1,25 +0,0 @@ -# core go fmt - -Format Go code using goimports or gofmt. - -## Usage - -```bash -core go fmt [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--fix` | Fix formatting in place | -| `--diff` | Show diff of changes | -| `--check` | Check only, exit 1 if not formatted | - -## Examples - -```bash -core go fmt # Check formatting -core go fmt --fix # Fix formatting -core go fmt --diff # Show diff -``` diff --git a/docs/cmd/go/index.md b/docs/cmd/go/index.md deleted file mode 100644 index 981953c..0000000 --- a/docs/cmd/go/index.md +++ /dev/null @@ -1,15 +0,0 @@ -# core go - -Go development tools with enhanced output and environment setup. 
- -## Subcommands - -| Command | Description | -|---------|-------------| -| [test](test/) | Run tests with coverage | -| [cov](cov/) | Run tests with coverage report | -| [fmt](fmt/) | Format Go code | -| [lint](lint/) | Run golangci-lint | -| [install](install/) | Install Go binary | -| [mod](mod/) | Module management | -| [work](work/) | Workspace management | diff --git a/docs/cmd/go/install/example.md b/docs/cmd/go/install/example.md deleted file mode 100644 index bba88cd..0000000 --- a/docs/cmd/go/install/example.md +++ /dev/null @@ -1,15 +0,0 @@ -# Go Install Examples - -```bash -# Auto-detect cmd/ -core go install - -# Specific path -core go install ./cmd/myapp - -# Pure Go (no CGO) -core go install --no-cgo - -# Verbose -core go install -v -``` diff --git a/docs/cmd/go/install/index.md b/docs/cmd/go/install/index.md deleted file mode 100644 index e7bd109..0000000 --- a/docs/cmd/go/install/index.md +++ /dev/null @@ -1,25 +0,0 @@ -# core go install - -Install Go binary with auto-detection. 
- -## Usage - -```bash -core go install [path] [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--no-cgo` | Disable CGO | -| `-v` | Verbose | - -## Examples - -```bash -core go install # Install current module -core go install ./cmd/core # Install specific path -core go install --no-cgo # Pure Go (no C dependencies) -core go install -v # Verbose output -``` diff --git a/docs/cmd/go/lint/example.md b/docs/cmd/go/lint/example.md deleted file mode 100644 index 56b46d4..0000000 --- a/docs/cmd/go/lint/example.md +++ /dev/null @@ -1,22 +0,0 @@ -# Go Lint Examples - -```bash -# Check -core go lint - -# Auto-fix -core go lint --fix -``` - -## Configuration - -`.golangci.yml`: - -```yaml -linters: - enable: - - gofmt - - govet - - errcheck - - staticcheck -``` diff --git a/docs/cmd/go/lint/index.md b/docs/cmd/go/lint/index.md deleted file mode 100644 index 5f9e804..0000000 --- a/docs/cmd/go/lint/index.md +++ /dev/null @@ -1,22 +0,0 @@ -# core go lint - -Run golangci-lint. - -## Usage - -```bash -core go lint [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--fix` | Fix issues automatically | - -## Examples - -```bash -core go lint # Check -core go lint --fix # Auto-fix -``` diff --git a/docs/cmd/go/mod/download/index.md b/docs/cmd/go/mod/download/index.md deleted file mode 100644 index 240ef6d..0000000 --- a/docs/cmd/go/mod/download/index.md +++ /dev/null @@ -1,29 +0,0 @@ -# core go mod download - -Download modules to local cache. - -Wrapper around `go mod download`. Downloads all dependencies to the module cache. 
- -## Usage - -```bash -core go mod download -``` - -## What It Does - -- Downloads all modules in go.mod to `$GOPATH/pkg/mod` -- Useful for pre-populating cache for offline builds -- Validates checksums against go.sum - -## Examples - -```bash -# Download all dependencies -core go mod download -``` - -## See Also - -- [tidy](../tidy/) - Clean up go.mod -- [verify](../verify/) - Verify checksums diff --git a/docs/cmd/go/mod/example.md b/docs/cmd/go/mod/example.md deleted file mode 100644 index 57d2e66..0000000 --- a/docs/cmd/go/mod/example.md +++ /dev/null @@ -1,15 +0,0 @@ -# Go Module Examples - -```bash -# Tidy -core go mod tidy - -# Download -core go mod download - -# Verify -core go mod verify - -# Graph -core go mod graph -``` diff --git a/docs/cmd/go/mod/graph/index.md b/docs/cmd/go/mod/graph/index.md deleted file mode 100644 index 2aa2619..0000000 --- a/docs/cmd/go/mod/graph/index.md +++ /dev/null @@ -1,44 +0,0 @@ -# core go mod graph - -Print module dependency graph. - -Wrapper around `go mod graph`. Outputs the module dependency graph in text form. - -## Usage - -```bash -core go mod graph -``` - -## What It Does - -- Prints module dependencies as pairs -- Each line shows: `module@version dependency@version` -- Useful for understanding dependency relationships - -## Examples - -```bash -# Print dependency graph -core go mod graph - -# Find who depends on a specific module -core go mod graph | grep "some/module" - -# Visualise with graphviz -core go mod graph | dot -Tpng -o deps.png -``` - -## Output - -``` -github.com/host-uk/core github.com/stretchr/testify@v1.11.1 -github.com/stretchr/testify@v1.11.1 github.com/davecgh/go-spew@v1.1.2 -github.com/stretchr/testify@v1.11.1 github.com/pmezard/go-difflib@v1.0.1 -... 
-``` - -## See Also - -- [tidy](../tidy/) - Clean up go.mod -- [dev impact](../../../dev/impact/) - Show repo dependency impact diff --git a/docs/cmd/go/mod/index.md b/docs/cmd/go/mod/index.md deleted file mode 100644 index ee8e46e..0000000 --- a/docs/cmd/go/mod/index.md +++ /dev/null @@ -1,21 +0,0 @@ -# core go mod - -Module management. - -## Subcommands - -| Command | Description | -|---------|-------------| -| `tidy` | Add missing and remove unused modules | -| `download` | Download modules to local cache | -| `verify` | Verify dependencies | -| `graph` | Print module dependency graph | - -## Examples - -```bash -core go mod tidy -core go mod download -core go mod verify -core go mod graph -``` diff --git a/docs/cmd/go/mod/tidy/index.md b/docs/cmd/go/mod/tidy/index.md deleted file mode 100644 index 684b07e..0000000 --- a/docs/cmd/go/mod/tidy/index.md +++ /dev/null @@ -1,29 +0,0 @@ -# core go mod tidy - -Add missing and remove unused modules. - -Wrapper around `go mod tidy`. Ensures go.mod and go.sum are in sync with the source code. - -## Usage - -```bash -core go mod tidy -``` - -## What It Does - -- Adds missing module requirements -- Removes unused module requirements -- Updates go.sum with checksums - -## Examples - -```bash -# Tidy the current module -core go mod tidy -``` - -## See Also - -- [download](../download/) - Download modules -- [verify](../verify/) - Verify dependencies diff --git a/docs/cmd/go/mod/verify/index.md b/docs/cmd/go/mod/verify/index.md deleted file mode 100644 index e01dc2a..0000000 --- a/docs/cmd/go/mod/verify/index.md +++ /dev/null @@ -1,41 +0,0 @@ -# core go mod verify - -Verify dependencies have not been modified. - -Wrapper around `go mod verify`. Checks that dependencies in the module cache match their checksums in go.sum. 
- -## Usage - -```bash -core go mod verify -``` - -## What It Does - -- Verifies each module in the cache -- Compares against go.sum checksums -- Reports any tampering or corruption - -## Examples - -```bash -# Verify all dependencies -core go mod verify -``` - -## Output - -``` -all modules verified -``` - -Or if verification fails: - -``` -github.com/example/pkg v1.2.3: dir has been modified -``` - -## See Also - -- [download](../download/) - Download modules -- [tidy](../tidy/) - Clean up go.mod diff --git a/docs/cmd/go/test/example.md b/docs/cmd/go/test/example.md deleted file mode 100644 index 85ff1b5..0000000 --- a/docs/cmd/go/test/example.md +++ /dev/null @@ -1,27 +0,0 @@ -# Go Test Examples - -```bash -# All tests -core go test - -# Specific package -core go test --pkg ./pkg/core - -# Specific test -core go test --run TestHash - -# With coverage -core go test --coverage - -# Race detection -core go test --race - -# Short tests only -core go test --short - -# Verbose -core go test -v - -# JSON output (CI) -core go test --json -``` diff --git a/docs/cmd/go/test/index.md b/docs/cmd/go/test/index.md deleted file mode 100644 index 8b54524..0000000 --- a/docs/cmd/go/test/index.md +++ /dev/null @@ -1,31 +0,0 @@ -# core go test - -Run Go tests with coverage and filtered output. 
- -## Usage - -```bash -core go test [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--pkg` | Package to test (default: `./...`) | -| `--run` | Run only tests matching regexp | -| `--short` | Run only short tests | -| `--race` | Enable race detector | -| `--coverage` | Show detailed per-package coverage | -| `--json` | Output JSON results | -| `-v` | Verbose output | - -## Examples - -```bash -core go test # All tests -core go test --pkg ./pkg/core # Specific package -core go test --run TestHash # Specific test -core go test --coverage # With coverage -core go test --race # Race detection -``` diff --git a/docs/cmd/go/work/index.md b/docs/cmd/go/work/index.md deleted file mode 100644 index 4022507..0000000 --- a/docs/cmd/go/work/index.md +++ /dev/null @@ -1,19 +0,0 @@ -# core go work - -Go workspace management commands. - -## Subcommands - -| Command | Description | -|---------|-------------| -| `sync` | Sync go.work with modules | -| `init` | Initialize go.work | -| `use` | Add module to workspace | - -## Examples - -```bash -core go work sync # Sync workspace -core go work init # Initialize workspace -core go work use ./pkg/mymodule # Add module to workspace -``` diff --git a/docs/cmd/go/work/init/index.md b/docs/cmd/go/work/init/index.md deleted file mode 100644 index 6527324..0000000 --- a/docs/cmd/go/work/init/index.md +++ /dev/null @@ -1,40 +0,0 @@ -# core go work init - -Initialize a Go workspace. - -Wrapper around `go work init`. Creates a new go.work file in the current directory. - -## Usage - -```bash -core go work init -``` - -## What It Does - -- Creates a go.work file -- Automatically adds current module if go.mod exists -- Enables multi-module development - -## Examples - -```bash -# Initialize workspace -core go work init - -# Then add more modules -core go work use ./pkg/mymodule -``` - -## Generated File - -```go -go 1.25 - -use . 
-``` - -## See Also - -- [use](../use/) - Add module to workspace -- [sync](../sync/) - Sync workspace diff --git a/docs/cmd/go/work/sync/index.md b/docs/cmd/go/work/sync/index.md deleted file mode 100644 index 38caed1..0000000 --- a/docs/cmd/go/work/sync/index.md +++ /dev/null @@ -1,35 +0,0 @@ -# core go work sync - -Sync go.work with modules. - -Wrapper around `go work sync`. Synchronises the workspace's build list back to the workspace modules. - -## Usage - -```bash -core go work sync -``` - -## What It Does - -- Updates each module's go.mod to match the workspace build list -- Ensures all modules use compatible dependency versions -- Run after adding new modules or updating dependencies - -## Examples - -```bash -# Sync workspace -core go work sync -``` - -## When To Use - -- After running `go get` to update a dependency -- After adding a new module with `core go work use` -- When modules have conflicting dependency versions - -## See Also - -- [init](../init/) - Initialize workspace -- [use](../use/) - Add module to workspace diff --git a/docs/cmd/go/work/use/index.md b/docs/cmd/go/work/use/index.md deleted file mode 100644 index 25e0cab..0000000 --- a/docs/cmd/go/work/use/index.md +++ /dev/null @@ -1,46 +0,0 @@ -# core go work use - -Add module to workspace. - -Wrapper around `go work use`. Adds one or more modules to the go.work file. - -## Usage - -```bash -core go work use [paths...] 
-``` - -## What It Does - -- Adds specified module paths to go.work -- Auto-discovers modules if no paths given -- Enables developing multiple modules together - -## Examples - -```bash -# Add a specific module -core go work use ./pkg/mymodule - -# Add multiple modules -core go work use ./pkg/one ./pkg/two - -# Auto-discover and add all modules -core go work use -``` - -## Auto-Discovery - -When called without arguments, scans for go.mod files and adds all found modules: - -```bash -core go work use -# Added ./pkg/build -# Added ./pkg/repos -# Added ./cmd/core -``` - -## See Also - -- [init](../init/) - Initialize workspace -- [sync](../sync/) - Sync workspace diff --git a/docs/cmd/index.md b/docs/cmd/index.md deleted file mode 100644 index fce3183..0000000 --- a/docs/cmd/index.md +++ /dev/null @@ -1,31 +0,0 @@ -# Core CLI - -Unified interface for Go/PHP development, multi-repo management, and deployment. - -## Commands - -| Command | Description | -|---------|-------------| -| [ai](ai/) | AI agent task management and Claude integration | -| [go](go/) | Go development tools | -| [php](php/) | Laravel/PHP development tools | -| [build](build/) | Build projects | -| [ci](ci/) | Publish releases | -| [sdk](sdk/) | SDK validation and compatibility | -| [dev](dev/) | Multi-repo workflow + dev environment | -| [pkg](pkg/) | Package management | -| [vm](vm/) | LinuxKit VM management | -| [docs](docs/) | Documentation management | -| [setup](setup/) | Clone repos from registry | -| [doctor](doctor/) | Check environment | -| [test](test/) | Run Go tests with coverage | - -## Installation - -```bash -go install github.com/host-uk/core/cmd/core@latest -``` - -Verify: `core doctor` - -See [Getting Started](../getting-started.md) for all installation options. 
diff --git a/docs/cmd/php/example.md b/docs/cmd/php/example.md deleted file mode 100644 index 96e1600..0000000 --- a/docs/cmd/php/example.md +++ /dev/null @@ -1,111 +0,0 @@ -# PHP Examples - -## Development - -```bash -# Start all services -core php dev - -# With HTTPS -core php dev --https - -# Skip services -core php dev --no-vite --no-horizon -``` - -## Testing - -```bash -# Run all -core php test - -# Parallel -core php test --parallel - -# With coverage -core php test --coverage - -# Filter -core php test --filter UserTest -``` - -## Code Quality - -```bash -# Format -core php fmt --fix - -# Static analysis -core php analyse --level 9 -``` - -## Deployment - -```bash -# Production -core php deploy - -# Staging -core php deploy --staging - -# Wait for completion -core php deploy --wait - -# Check status -core php deploy:status - -# Rollback -core php deploy:rollback -``` - -## Configuration - -### .env - -```env -COOLIFY_URL=https://coolify.example.com -COOLIFY_TOKEN=your-api-token -COOLIFY_APP_ID=production-app-id -COOLIFY_STAGING_APP_ID=staging-app-id -``` - -### .core/php.yaml - -```yaml -version: 1 - -dev: - domain: myapp.test - ssl: true - services: - - frankenphp - - vite - - horizon - - reverb - - redis - -deploy: - coolify: - server: https://coolify.example.com - project: my-project -``` - -## Package Linking - -```bash -# Link local packages -core php packages link ../my-package - -# Update linked -core php packages update - -# Unlink -core php packages unlink my-package -``` - -## SSL Setup - -```bash -core php ssl -core php ssl --domain myapp.test -``` diff --git a/docs/cmd/php/index.md b/docs/cmd/php/index.md deleted file mode 100644 index 83ad596..0000000 --- a/docs/cmd/php/index.md +++ /dev/null @@ -1,413 +0,0 @@ -# core php - -Laravel/PHP development tools with FrankenPHP. 
- -## Commands - -### Development - -| Command | Description | -|---------|-------------| -| [`dev`](#php-dev) | Start development environment | -| [`logs`](#php-logs) | View service logs | -| [`stop`](#php-stop) | Stop all services | -| [`status`](#php-status) | Show service status | -| [`ssl`](#php-ssl) | Setup SSL certificates with mkcert | - -### Build & Production - -| Command | Description | -|---------|-------------| -| [`build`](#php-build) | Build Docker or LinuxKit image | -| [`serve`](#php-serve) | Run production container | -| [`shell`](#php-shell) | Open shell in running container | - -### Code Quality - -| Command | Description | -|---------|-------------| -| [`test`](#php-test) | Run PHP tests (PHPUnit/Pest) | -| [`fmt`](#php-fmt) | Format code with Laravel Pint | -| [`analyse`](#php-analyse) | Run PHPStan static analysis | - -### Package Management - -| Command | Description | -|---------|-------------| -| [`packages link`](#php-packages-link) | Link local packages by path | -| [`packages unlink`](#php-packages-unlink) | Unlink packages by name | -| [`packages update`](#php-packages-update) | Update linked packages | -| [`packages list`](#php-packages-list) | List linked packages | - -### Deployment (Coolify) - -| Command | Description | -|---------|-------------| -| [`deploy`](#php-deploy) | Deploy to Coolify | -| [`deploy:status`](#php-deploystatus) | Show deployment status | -| [`deploy:rollback`](#php-deployrollback) | Rollback to previous deployment | -| [`deploy:list`](#php-deploylist) | List recent deployments | - ---- - -## php dev - -Start the Laravel development environment with all detected services. 
- -```bash -core php dev [flags] -``` - -### Services Orchestrated - -- **FrankenPHP/Octane** - HTTP server (port 8000, HTTPS on 443) -- **Vite** - Frontend dev server (port 5173) -- **Laravel Horizon** - Queue workers -- **Laravel Reverb** - WebSocket server (port 8080) -- **Redis** - Cache and queue backend (port 6379) - -### Flags - -| Flag | Description | -|------|-------------| -| `--no-vite` | Skip Vite dev server | -| `--no-horizon` | Skip Laravel Horizon | -| `--no-reverb` | Skip Laravel Reverb | -| `--no-redis` | Skip Redis server | -| `--https` | Enable HTTPS with mkcert | -| `--domain` | Domain for SSL certificate (default: from APP_URL) | -| `--port` | FrankenPHP port (default: 8000) | - -### Examples - -```bash -# Start all detected services -core php dev - -# With HTTPS -core php dev --https - -# Skip optional services -core php dev --no-horizon --no-reverb -``` - ---- - -## php logs - -Stream unified logs from all running services. - -```bash -core php logs [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--follow` | Follow log output | -| `--service` | Specific service (frankenphp, vite, horizon, reverb, redis) | - ---- - -## php stop - -Stop all running Laravel services. - -```bash -core php stop -``` - ---- - -## php status - -Show the status of all Laravel services and project configuration. - -```bash -core php status -``` - ---- - -## php ssl - -Setup local SSL certificates using mkcert. - -```bash -core php ssl [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--domain` | Domain for certificate (default: from APP_URL or localhost) | - ---- - -## php build - -Build a production-ready container image. 
- -```bash -core php build [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--type` | Build type: `docker` (default) or `linuxkit` | -| `--name` | Image name (default: project directory name) | -| `--tag` | Image tag (default: latest) | -| `--platform` | Target platform (e.g., linux/amd64, linux/arm64) | -| `--dockerfile` | Path to custom Dockerfile | -| `--output` | Output path for LinuxKit image | -| `--format` | LinuxKit format: qcow2 (default), iso, raw, vmdk | -| `--template` | LinuxKit template name (default: server-php) | -| `--no-cache` | Build without cache | - -### Examples - -```bash -# Build Docker image -core php build - -# With custom name and tag -core php build --name myapp --tag v1.0 - -# Build LinuxKit image -core php build --type linuxkit -``` - ---- - -## php serve - -Run a production container. - -```bash -core php serve [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--name` | Docker image name (required) | -| `--tag` | Image tag (default: latest) | -| `--container` | Container name | -| `--port` | HTTP port (default: 80) | -| `--https-port` | HTTPS port (default: 443) | -| `-d` | Run in detached mode | -| `--env-file` | Path to environment file | - -### Examples - -```bash -core php serve --name myapp -core php serve --name myapp -d -core php serve --name myapp --port 8080 -``` - ---- - -## php shell - -Open an interactive shell in a running container. - -```bash -core php shell -``` - ---- - -## php test - -Run PHP tests using PHPUnit or Pest. - -```bash -core php test [flags] -``` - -Auto-detects Pest if `tests/Pest.php` exists. 
- -### Flags - -| Flag | Description | -|------|-------------| -| `--parallel` | Run tests in parallel | -| `--coverage` | Generate code coverage | -| `--filter` | Filter tests by name pattern | -| `--group` | Run only tests in specified group | - -### Examples - -```bash -core php test -core php test --parallel --coverage -core php test --filter UserTest -``` - ---- - -## php fmt - -Format PHP code using Laravel Pint. - -```bash -core php fmt [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--fix` | Auto-fix formatting issues | -| `--diff` | Show diff of changes | - ---- - -## php analyse - -Run PHPStan or Larastan static analysis. - -```bash -core php analyse [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--level` | PHPStan analysis level (0-9) | -| `--memory` | Memory limit (e.g., 2G) | - ---- - -## php packages link - -Link local PHP packages for development. - -```bash -core php packages link [...] -``` - -Adds path repositories to composer.json with symlink enabled. - ---- - -## php packages unlink - -Remove linked packages from composer.json. - -```bash -core php packages unlink [...] -``` - ---- - -## php packages update - -Update linked packages via Composer. - -```bash -core php packages update [...] -``` - ---- - -## php packages list - -List all locally linked packages. - -```bash -core php packages list -``` - ---- - -## php deploy - -Deploy the PHP application to Coolify. 
- -```bash -core php deploy [flags] -``` - -### Configuration - -Requires environment variables in `.env`: -``` -COOLIFY_URL=https://coolify.example.com -COOLIFY_TOKEN=your-api-token -COOLIFY_APP_ID=production-app-id -COOLIFY_STAGING_APP_ID=staging-app-id -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--staging` | Deploy to staging environment | -| `--force` | Force deployment even if no changes detected | -| `--wait` | Wait for deployment to complete | - ---- - -## php deploy:status - -Show the status of a deployment. - -```bash -core php deploy:status [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--staging` | Check staging environment | -| `--id` | Specific deployment ID | - ---- - -## php deploy:rollback - -Rollback to a previous deployment. - -```bash -core php deploy:rollback [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--staging` | Rollback staging environment | -| `--id` | Specific deployment ID to rollback to | -| `--wait` | Wait for rollback to complete | - ---- - -## php deploy:list - -List recent deployments. - -```bash -core php deploy:list [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--staging` | List staging deployments | -| `--limit` | Number of deployments (default: 10) | - ---- - -## Configuration - -Optional `.core/php.yaml` - see [Configuration](example.md#configuration) for examples. 
diff --git a/docs/cmd/pkg/example.md b/docs/cmd/pkg/example.md deleted file mode 100644 index 7904aae..0000000 --- a/docs/cmd/pkg/example.md +++ /dev/null @@ -1,36 +0,0 @@ -# Package Examples - -## Search - -```bash -core pkg search core- -core pkg search api -core pkg search --org myorg -``` - -## Install - -```bash -core pkg install core-api -core pkg install host-uk/core-api -``` - -## List - -```bash -core pkg list -core pkg list --format json -``` - -## Update - -```bash -core pkg update -core pkg update core-api -``` - -## Outdated - -```bash -core pkg outdated -``` diff --git a/docs/cmd/pkg/index.md b/docs/cmd/pkg/index.md deleted file mode 100644 index fcc218b..0000000 --- a/docs/cmd/pkg/index.md +++ /dev/null @@ -1,144 +0,0 @@ -# core pkg - -Package management for host-uk repositories. - -## Usage - -```bash -core pkg [flags] -``` - -## Commands - -| Command | Description | -|---------|-------------| -| [`search`](#pkg-search) | Search GitHub for packages | -| [`install`](#pkg-install) | Clone a package from GitHub | -| [`list`](#pkg-list) | List installed packages | -| [`update`](#pkg-update) | Update installed packages | -| [`outdated`](#pkg-outdated) | Check for outdated packages | - ---- - -## pkg search - -Search GitHub for host-uk packages. - -```bash -core pkg search [flags] -``` - -Results are cached for 1 hour in `.core/cache/`. 
- -### Flags - -| Flag | Description | -|------|-------------| -| `--org` | GitHub organisation (default: host-uk) | -| `--pattern` | Repo name pattern (* for wildcard) | -| `--type` | Filter by type in name (mod, services, plug, website) | -| `--limit` | Max results (default: 50) | -| `--refresh` | Bypass cache and fetch fresh data | - -### Examples - -```bash -# List all repos in org -core pkg search - -# Search for core-* repos -core pkg search --pattern 'core-*' - -# Search different org -core pkg search --org mycompany - -# Bypass cache -core pkg search --refresh -``` - ---- - -## pkg install - -Clone a package from GitHub. - -```bash -core pkg install [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--dir` | Target directory (default: ./packages or current dir) | -| `--add` | Add to repos.yaml registry | - -### Examples - -```bash -# Clone to packages/ -core pkg install host-uk/core-php - -# Clone to custom directory -core pkg install host-uk/core-tenant --dir ./packages - -# Clone and add to registry -core pkg install host-uk/core-admin --add -``` - ---- - -## pkg list - -List installed packages from repos.yaml. - -```bash -core pkg list -``` - -Shows installed status (✓) and description for each package. - ---- - -## pkg update - -Pull latest changes for installed packages. - -```bash -core pkg update [...] [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--all` | Update all packages | - -### Examples - -```bash -# Update specific package -core pkg update core-php - -# Update all packages -core pkg update --all -``` - ---- - -## pkg outdated - -Check which packages have unpulled commits. - -```bash -core pkg outdated -``` - -Fetches from remote and shows packages that are behind. 
- ---- - -## See Also - -- [setup](../setup/) - Clone all repos from registry -- [dev work](../dev/work/) - Multi-repo workflow diff --git a/docs/cmd/pkg/search/example.md b/docs/cmd/pkg/search/example.md deleted file mode 100644 index fbcaa6f..0000000 --- a/docs/cmd/pkg/search/example.md +++ /dev/null @@ -1,23 +0,0 @@ -# Package Search Examples - -```bash -# Find all core-* packages -core pkg search core- - -# Search term -core pkg search api - -# Different org -core pkg search --org myorg query -``` - -## Output - -``` -┌──────────────┬─────────────────────────────┐ -│ Package │ Description │ -├──────────────┼─────────────────────────────┤ -│ core-api │ REST API framework │ -│ core-auth │ Authentication utilities │ -└──────────────┴─────────────────────────────┘ -``` diff --git a/docs/cmd/pkg/search/index.md b/docs/cmd/pkg/search/index.md deleted file mode 100644 index 57fea91..0000000 --- a/docs/cmd/pkg/search/index.md +++ /dev/null @@ -1,75 +0,0 @@ -# core pkg search - -Search GitHub for repositories matching a pattern. - -Uses `gh` CLI for authenticated search. Results are cached for 1 hour. 
- -## Usage - -```bash -core pkg search [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--pattern` | Repo name pattern (* for wildcard) | -| `--org` | GitHub organisation (default: host-uk) | -| `--type` | Filter by type in name (mod, services, plug, website) | -| `--limit` | Max results (default: 50) | -| `--refresh` | Bypass cache and fetch fresh data | - -## Examples - -```bash -# List all host-uk repos -core pkg search - -# Search for core-* repos -core pkg search --pattern "core-*" - -# Search different org -core pkg search --org mycompany - -# Filter by type -core pkg search --type services - -# Bypass cache -core pkg search --refresh - -# Combine filters -core pkg search --pattern "core-*" --type mod --limit 20 -``` - -## Output - -``` -Found 5 repositories: - - host-uk/core - Go CLI for the host-uk ecosystem - ★ 42 Go Updated 2 hours ago - - host-uk/core-php - PHP/Laravel packages for Core - ★ 18 PHP Updated 1 day ago - - host-uk/core-images - Docker and LinuxKit images - ★ 8 Dockerfile Updated 3 days ago -``` - -## Authentication - -Uses GitHub CLI (`gh`) authentication.
Ensure you're logged in: - -```bash -gh auth status -gh auth login # if not authenticated -``` - -## See Also - -- [pkg install](../) - Clone a package from GitHub -- [setup command](../../setup/) - Clone all repos from registry diff --git a/docs/cmd/sdk/example.md b/docs/cmd/sdk/example.md deleted file mode 100644 index 2fada8c..0000000 --- a/docs/cmd/sdk/example.md +++ /dev/null @@ -1,35 +0,0 @@ -# SDK Examples - -## Validate - -```bash -core sdk validate -core sdk validate --spec ./api.yaml -``` - -## Diff - -```bash -# Compare with tag -core sdk diff --base v1.0.0 - -# Compare files -core sdk diff --base ./old-api.yaml --spec ./new-api.yaml -``` - -## Output - -``` -Breaking changes detected: - -- DELETE /users/{id}/profile - Endpoint removed - -- PATCH /users/{id} - Required field 'email' added - -Non-breaking changes: - -+ POST /users/{id}/avatar - New endpoint added -``` diff --git a/docs/cmd/sdk/index.md b/docs/cmd/sdk/index.md deleted file mode 100644 index bd6828c..0000000 --- a/docs/cmd/sdk/index.md +++ /dev/null @@ -1,106 +0,0 @@ -# core sdk - -SDK validation and API compatibility tools. - -To generate SDKs, use: `core build sdk` - -## Usage - -```bash -core sdk [flags] -``` - -## Commands - -| Command | Description | -|---------|-------------| -| `diff` | Check for breaking API changes | -| `validate` | Validate OpenAPI spec | - -## sdk validate - -Validate an OpenAPI specification file. - -```bash -core sdk validate [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--spec` | Path to OpenAPI spec file (auto-detected) | - -### Examples - -```bash -# Validate detected spec -core sdk validate - -# Validate specific file -core sdk validate --spec api/openapi.yaml -``` - -## sdk diff - -Check for breaking changes between API versions. 
- -```bash -core sdk diff [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `--base` | Base spec version (git tag or file path) | -| `--spec` | Current spec file (auto-detected) | - -### Examples - -```bash -# Compare against previous release -core sdk diff --base v1.0.0 - -# Compare two files -core sdk diff --base old-api.yaml --spec new-api.yaml -``` - -### Breaking Changes Detected - -- Removed endpoints -- Changed parameter types -- Removed required fields -- Changed response types - -## SDK Generation - -SDK generation is handled by `core build sdk`, not this command. - -```bash -# Generate SDKs -core build sdk - -# Generate specific language -core build sdk --lang typescript - -# Preview without writing -core build sdk --dry-run -``` - -See [build sdk](../build/sdk/) for generation details. - -## Spec Auto-Detection - -Core looks for OpenAPI specs in this order: - -1. Path specified in config (`sdk.spec`) -2. `openapi.yaml` / `openapi.json` -3. `api/openapi.yaml` / `api/openapi.json` -4. `docs/openapi.yaml` / `docs/openapi.json` -5. 
Laravel Scramble endpoint (`/docs/api.json`) - -## See Also - -- [build sdk](../build/sdk/) - Generate SDKs from OpenAPI -- [ci command](../ci/) - Release workflow diff --git a/docs/cmd/setup/example.md b/docs/cmd/setup/example.md deleted file mode 100644 index 23f2410..0000000 --- a/docs/cmd/setup/example.md +++ /dev/null @@ -1,293 +0,0 @@ -# Setup Examples - -## Clone from Registry - -```bash -# Clone all repos defined in repos.yaml -core setup - -# Preview what would be cloned -core setup --dry-run - -# Only foundation packages -core setup --only foundation - -# Multiple types -core setup --only foundation,module - -# Use specific registry file -core setup --registry ~/projects/repos.yaml -``` - -## Bootstrap New Workspace - -```bash -# In an empty directory - bootstraps in place -mkdir my-workspace && cd my-workspace -core setup - -# Shows interactive wizard to select packages: -# ┌─────────────────────────────────────────────┐ -# │ Select packages to clone │ -# │ Use space to select, enter to confirm │ -# │ │ -# │ ── Foundation (core framework) ── │ -# │ ☑ core-php Foundation framework │ -# │ ☑ core-tenant Multi-tenancy module │ -# │ │ -# │ ── Products (applications) ── │ -# │ ☐ core-bio Link-in-bio product │ -# │ ☐ core-social Social scheduling │ -# └─────────────────────────────────────────────┘ - -# Non-interactive: clone all packages -core setup --all - -# Create workspace in subdirectory -cd ~/Code -core setup --name my-project - -# CI mode: fully non-interactive -core setup --all --name ci-test -``` - -## Setup Single Repository - -```bash -# In a git repo without .core/ configuration -cd ~/Code/my-go-project -core setup - -# Shows choice dialog: -# ┌─────────────────────────────────────────────┐ -# │ Setup options │ -# │ You're in a git repository. What would you │ -# │ like to do? 
│ -# │ │ -# │ ● Setup this repo (create .core/ config) │ -# │ ○ Create a new workspace (clone repos) │ -# └─────────────────────────────────────────────┘ - -# Preview generated configuration -core setup --dry-run - -# Output: -# → Setting up repository configuration -# -# ✓ Detected project type: go -# → Also found: (none) -# -# → Would create: -# /Users/you/Code/my-go-project/.core/build.yaml -# -# Configuration preview: -# version: 1 -# project: -# name: my-go-project -# description: Go application -# main: ./cmd/my-go-project -# binary: my-go-project -# ... -``` - -## Configuration Files - -### repos.yaml (Workspace Registry) - -```yaml -org: host-uk -base_path: . -defaults: - ci: github - license: EUPL-1.2 - branch: main -repos: - core-php: - type: foundation - description: Foundation framework - core-tenant: - type: module - depends_on: [core-php] - description: Multi-tenancy module - core-admin: - type: module - depends_on: [core-php, core-tenant] - description: Admin panel - core-bio: - type: product - depends_on: [core-php, core-tenant] - description: Link-in-bio product - domain: bio.host.uk.com - core-devops: - type: foundation - clone: false # Already exists, skip cloning -``` - -### .core/build.yaml (Repository Config) - -Generated for Go projects: - -```yaml -version: 1 -project: - name: my-project - description: Go application - main: ./cmd/my-project - binary: my-project -build: - cgo: false - flags: - - -trimpath - ldflags: - - -s - - -w - env: [] -targets: - - os: linux - arch: amd64 - - os: linux - arch: arm64 - - os: darwin - arch: amd64 - - os: darwin - arch: arm64 - - os: windows - arch: amd64 -sign: - enabled: false -``` - -Generated for Wails projects: - -```yaml -version: 1 -project: - name: my-app - description: Wails desktop application - main: . 
- binary: my-app -targets: - - os: darwin - arch: amd64 - - os: darwin - arch: arm64 - - os: windows - arch: amd64 - - os: linux - arch: amd64 -``` - -### .core/release.yaml (Release Config) - -Generated for Go projects: - -```yaml -version: 1 -project: - name: my-project - repository: owner/my-project - -changelog: - include: - - feat - - fix - - perf - - refactor - exclude: - - chore - - docs - - style - - test - -publishers: - - type: github - draft: false - prerelease: false -``` - -### .core/test.yaml (Test Config) - -Generated for Go projects: - -```yaml -version: 1 - -commands: - - name: unit - run: go test ./... - - name: coverage - run: go test -coverprofile=coverage.out ./... - - name: race - run: go test -race ./... - -env: - CGO_ENABLED: "0" -``` - -Generated for PHP projects: - -```yaml -version: 1 - -commands: - - name: unit - run: vendor/bin/pest --parallel - - name: types - run: vendor/bin/phpstan analyse - - name: lint - run: vendor/bin/pint --test - -env: - APP_ENV: testing - DB_CONNECTION: sqlite -``` - -Generated for Node.js projects: - -```yaml -version: 1 - -commands: - - name: unit - run: npm test - - name: lint - run: npm run lint - - name: typecheck - run: npm run typecheck - -env: - NODE_ENV: test -``` - -## Workflow Examples - -### New Developer Setup - -```bash -# Clone the workspace -mkdir host-uk && cd host-uk -core setup - -# Select packages in wizard, then: -core health # Check all repos are healthy -core doctor # Verify environment -``` - -### CI Pipeline Setup - -```bash -# Non-interactive full clone -core setup --all --name workspace - -# Or with specific packages -core setup --only foundation,module --name workspace -``` - -### Adding Build Config to Existing Repo - -```bash -cd my-existing-project -core setup # Choose "Setup this repo" -# Edit .core/build.yaml as needed -core build # Build the project -``` diff --git a/docs/cmd/setup/index.md b/docs/cmd/setup/index.md deleted file mode 100644 index d07121f..0000000 --- 
a/docs/cmd/setup/index.md +++ /dev/null @@ -1,213 +0,0 @@ -# core setup - -Clone repositories from registry or bootstrap a new workspace. - -## Overview - -The `setup` command operates in three modes: - -1. **Registry mode** - When `repos.yaml` exists nearby, clones repositories into packages/ -2. **Bootstrap mode** - When no registry exists, clones `core-devops` first, then presents an interactive wizard to select packages -3. **Repo setup mode** - When run in a git repo root, offers to create `.core/build.yaml` configuration - -## Usage - -```bash -core setup [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--registry` | Path to repos.yaml (auto-detected if not specified) | -| `--dry-run` | Show what would be cloned without cloning | -| `--only` | Only clone repos of these types (comma-separated: foundation,module,product) | -| `--all` | Skip wizard, clone all packages (non-interactive) | -| `--name` | Project directory name for bootstrap mode | -| `--build` | Run build after cloning | - ---- - -## Registry Mode - -When `repos.yaml` is found nearby (current directory or parents), setup clones all defined repositories: - -```bash -# In a directory with repos.yaml -core setup - -# Preview what would be cloned -core setup --dry-run - -# Only clone foundation packages -core setup --only foundation - -# Multiple types -core setup --only foundation,module -``` - -In registry mode with a TTY, an interactive wizard allows you to select which packages to clone. Use `--all` to skip the wizard and clone everything. - ---- - -## Bootstrap Mode - -When no `repos.yaml` exists, setup enters bootstrap mode: - -```bash -# In an empty directory - bootstraps workspace in place -mkdir my-project && cd my-project -core setup - -# In a non-empty directory - creates subdirectory -cd ~/Code -core setup --name my-workspace - -# Non-interactive: clone all packages -core setup --all --name ci-test -``` - -Bootstrap mode: -1. 
Detects if current directory is empty -2. If not empty, prompts for project name (or uses `--name`) -3. Clones `core-devops` (contains `repos.yaml`) -4. Loads the registry from core-devops -5. Shows interactive package selection wizard (unless `--all`) -6. Clones selected packages -7. Optionally runs build (with `--build`) - ---- - -## Repo Setup Mode - -When run in a git repository root (without `repos.yaml`), setup offers two choices: - -1. **Setup Working Directory** - Creates `.core/build.yaml` based on detected project type -2. **Create Package** - Creates a subdirectory and clones packages there - -```bash -cd ~/Code/my-go-project -core setup - -# Output: -# >> This directory is a git repository -# > Setup Working Directory -# Create Package (clone repos into subdirectory) -``` - -Choosing "Setup Working Directory" detects the project type and generates configuration: - -| Detected File | Project Type | -|---------------|--------------| -| `wails.json` | Wails | -| `go.mod` | Go | -| `composer.json` | PHP | -| `package.json` | Node.js | - -Creates three config files in `.core/`: - -| File | Purpose | -|------|---------| -| `build.yaml` | Build targets, flags, output settings | -| `release.yaml` | Changelog format, GitHub release config | -| `test.yaml` | Test commands, environment variables | - -Also auto-detects GitHub repo from git remote for release config. - -See [Configuration Files](example.md#configuration-files) for generated config examples. 
- ---- - -## Interactive Wizard - -When running in a terminal (TTY), the setup command presents an interactive multi-select wizard: - -- Packages are grouped by type (foundation, module, product, template) -- Use arrow keys to navigate -- Press space to select/deselect packages -- Type to filter the list -- Press enter to confirm selection - -The wizard is skipped when: -- `--all` flag is specified -- Not running in a TTY (e.g., CI pipelines) -- `--dry-run` is specified - ---- - -## Examples - -### Clone from Registry - -```bash -# Clone all repos (interactive wizard) -core setup - -# Clone all repos (non-interactive) -core setup --all - -# Preview without cloning -core setup --dry-run - -# Only foundation packages -core setup --only foundation -``` - -### Bootstrap New Workspace - -```bash -# Interactive bootstrap in empty directory -mkdir workspace && cd workspace -core setup - -# Non-interactive with all packages -core setup --all --name my-project - -# Bootstrap and run build -core setup --all --name my-project --build -``` - ---- - -## Registry Format - -The registry file (`repos.yaml`) defines repositories. See [Configuration Files](example.md#configuration-files) for format. - ---- - -## Finding Registry - -Core looks for `repos.yaml` in: - -1. Current directory -2. Parent directories (walking up to root) -3. `~/Code/host-uk/repos.yaml` -4. 
`~/.config/core/repos.yaml` - ---- - -## After Setup - -```bash -# Check workspace health -core dev health - -# Full workflow (status + commit + push) -core dev work - -# Build the project -core build - -# Run tests -core go test # Go projects -core php test # PHP projects -``` - ---- - -## See Also - -- [dev work](../dev/work/) - Multi-repo operations -- [build](../build/) - Build projects -- [doctor](../doctor/) - Check environment diff --git a/docs/cmd/test/example.md b/docs/cmd/test/example.md deleted file mode 100644 index 9e2a4a7..0000000 --- a/docs/cmd/test/example.md +++ /dev/null @@ -1,8 +0,0 @@ -# Test Examples - -**Note:** Prefer `core go test` or `core php test` instead. - -```bash -core test -core test --coverage -``` diff --git a/docs/cmd/test/index.md b/docs/cmd/test/index.md deleted file mode 100644 index 920baea..0000000 --- a/docs/cmd/test/index.md +++ /dev/null @@ -1,74 +0,0 @@ -# core test - -Run Go tests with coverage reporting. - -Sets `MACOSX_DEPLOYMENT_TARGET=26.0` to suppress linker warnings on macOS. - -## Usage - -```bash -core test [flags] -``` - -## Flags - -| Flag | Description | -|------|-------------| -| `--coverage` | Show detailed per-package coverage | -| `--json` | Output JSON for CI/agents | -| `--pkg` | Package pattern to test (default: ./...) | -| `--race` | Enable race detector | -| `--run` | Run only tests matching this regex | -| `--short` | Skip long-running tests | -| `--verbose` | Show test output as it runs | - -## Examples - -```bash -# Run all tests with coverage summary -core test - -# Show test output as it runs -core test --verbose - -# Detailed per-package coverage -core test --coverage - -# Test specific packages -core test --pkg ./pkg/... 
- -# Run specific test by name -core test --run TestName - -# Run tests matching pattern -core test --run "Test.*Good" - -# Skip long-running tests -core test --short - -# Enable race detector -core test --race - -# Output JSON for CI/agents -core test --json -``` - -## JSON Output - -With `--json`, outputs structured results: - -```json -{ - "passed": 14, - "failed": 0, - "skipped": 0, - "coverage": 75.1, - "exit_code": 0, - "failed_packages": [] -} -``` - -## See Also - -- [go test](../go/test/) - Go-specific test options -- [go cov](../go/cov/) - Coverage reports diff --git a/docs/cmd/vm/example.md b/docs/cmd/vm/example.md deleted file mode 100644 index f31f97e..0000000 --- a/docs/cmd/vm/example.md +++ /dev/null @@ -1,52 +0,0 @@ -# VM Examples - -## Running VMs - -```bash -# Run image -core vm run server.iso - -# Detached with resources -core vm run -d --memory 4096 --cpus 4 server.iso - -# From template -core vm run --template core-dev --var SSH_KEY="ssh-rsa AAAA..." -``` - -## Management - -```bash -# List running -core vm ps - -# Include stopped -core vm ps -a - -# Stop -core vm stop abc123 - -# View logs -core vm logs abc123 - -# Follow logs -core vm logs -f abc123 - -# Execute command -core vm exec abc123 ls -la - -# Shell -core vm exec abc123 /bin/sh -``` - -## Templates - -```bash -# List -core vm templates - -# Show content -core vm templates show core-dev - -# Show variables -core vm templates vars core-dev -``` diff --git a/docs/cmd/vm/index.md b/docs/cmd/vm/index.md deleted file mode 100644 index ec0be0f..0000000 --- a/docs/cmd/vm/index.md +++ /dev/null @@ -1,163 +0,0 @@ -# core vm - -LinuxKit VM management. - -LinuxKit VMs are lightweight, immutable VMs built from YAML templates. -They run using qemu or hyperkit depending on your system. 
- -## Usage - -```bash -core vm [flags] -``` - -## Commands - -| Command | Description | -|---------|-------------| -| [`run`](#vm-run) | Run a LinuxKit image or template | -| [`ps`](#vm-ps) | List running VMs | -| [`stop`](#vm-stop) | Stop a VM | -| [`logs`](#vm-logs) | View VM logs | -| [`exec`](#vm-exec) | Execute command in VM | -| [templates](templates/) | Manage LinuxKit templates | - ---- - -## vm run - -Run a LinuxKit image or build from a template. - -```bash -core vm run [flags] -core vm run --template [flags] -``` - -Supported image formats: `.iso`, `.qcow2`, `.vmdk`, `.raw` - -### Flags - -| Flag | Description | -|------|-------------| -| `--template` | Run from a LinuxKit template (build + run) | -| `--var` | Template variable in KEY=VALUE format (repeatable) | -| `--name` | Name for the container | -| `--memory` | Memory in MB (default: 1024) | -| `--cpus` | CPU count (default: 1) | -| `--ssh-port` | SSH port for exec commands (default: 2222) | -| `-d` | Run in detached mode (background) | - -### Examples - -```bash -# Run from image file -core vm run image.iso - -# Run detached with more resources -core vm run -d image.qcow2 --memory 2048 --cpus 4 - -# Run from template -core vm run --template core-dev --var SSH_KEY="ssh-rsa AAAA..." - -# Multiple template variables -core vm run --template server-php --var SSH_KEY="..." --var DOMAIN=example.com -``` - ---- - -## vm ps - -List running VMs. - -```bash -core vm ps [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `-a` | Show all (including stopped) | - -### Output - -``` -ID NAME IMAGE STATUS STARTED PID -abc12345 myvm ...core-dev.qcow2 running 5m 12345 -``` - ---- - -## vm stop - -Stop a running VM by ID or name. - -```bash -core vm stop -``` - -Supports partial ID matching. - -### Examples - -```bash -# Full ID -core vm stop abc12345678 - -# Partial ID -core vm stop abc1 -``` - ---- - -## vm logs - -View VM logs. 
- -```bash -core vm logs [flags] -``` - -### Flags - -| Flag | Description | -|------|-------------| -| `-f` | Follow log output | - -### Examples - -```bash -# View logs -core vm logs abc12345 - -# Follow logs -core vm logs -f abc1 -``` - ---- - -## vm exec - -Execute a command in a running VM via SSH. - -```bash -core vm exec -``` - -### Examples - -```bash -# List files -core vm exec abc12345 ls -la - -# Open shell -core vm exec abc1 /bin/sh -``` - ---- - -## See Also - -- [templates](templates/) - Manage LinuxKit templates -- [build](../build/) - Build LinuxKit images -- [dev](../dev/) - Dev environment management diff --git a/docs/cmd/vm/templates/example.md b/docs/cmd/vm/templates/example.md deleted file mode 100644 index c1f8b35..0000000 --- a/docs/cmd/vm/templates/example.md +++ /dev/null @@ -1,53 +0,0 @@ -# VM Templates Examples - -## List - -```bash -core vm templates -``` - -## Show - -```bash -core vm templates show core-dev -``` - -## Variables - -```bash -core vm templates vars core-dev -``` - -## Output - -``` -Variables for core-dev: - SSH_KEY (required) SSH public key - MEMORY (optional) Memory in MB (default: 4096) - CPUS (optional) CPU count (default: 4) -``` - -## Using Templates - -```bash -core vm run --template core-dev --var SSH_KEY="ssh-rsa AAAA..." -``` - -## Template Format - -`.core/linuxkit/myserver.yml`: - -```yaml -kernel: - image: linuxkit/kernel:5.15 - cmdline: "console=tty0" - -init: - - linuxkit/init:v1.0.0 - -services: - - name: sshd - image: linuxkit/sshd:v1.0.0 - - name: myapp - image: ghcr.io/myorg/myapp:latest -``` diff --git a/docs/cmd/vm/templates/index.md b/docs/cmd/vm/templates/index.md deleted file mode 100644 index 7ca3700..0000000 --- a/docs/cmd/vm/templates/index.md +++ /dev/null @@ -1,124 +0,0 @@ -# core vm templates - -Manage LinuxKit templates for container images. 
- -## Usage - -```bash -core vm templates [command] -``` - -## Commands - -| Command | Description | -|---------|-------------| -| `list` | List available templates | -| `show` | Show template details | -| `vars` | Show template variables | - -## templates list - -List all available LinuxKit templates. - -```bash -core vm templates list -``` - -### Output - -``` -Available Templates: - - core-dev - Full development environment with 100+ tools - Platforms: linux/amd64, linux/arm64 - - server-php - FrankenPHP production server - Platforms: linux/amd64, linux/arm64 - - edge-node - Minimal edge deployment - Platforms: linux/amd64, linux/arm64 -``` - -## templates show - -Show details of a specific template. - -```bash -core vm templates show -``` - -### Example - -```bash -core vm templates show core-dev -``` - -Output: -``` -Template: core-dev - -Description: Full development environment with 100+ tools - -Platforms: - - linux/amd64 - - linux/arm64 - -Formats: - - iso - - qcow2 - -Services: - - sshd - - docker - - frankenphp - -Size: ~1.8GB -``` - -## templates vars - -Show variables defined by a template. - -```bash -core vm templates vars -``` - -### Example - -```bash -core vm templates vars core-dev -``` - -Output: -``` -Variables for core-dev: - SSH_KEY (required) SSH public key - MEMORY (optional) Memory in MB (default: 4096) - CPUS (optional) CPU count (default: 4) -``` - -## Template Locations - -Templates are searched in order: - -1. `.core/linuxkit/` - Project-specific -2. `~/.core/templates/` - User templates -3. Built-in templates - -## Creating Templates - -Create a LinuxKit YAML in `.core/linuxkit/`. See [Template Format](example.md#template-format) for examples. 
- -Run with: - -```bash -core vm run --template myserver -``` - -## See Also - -- [vm command](../) - Run LinuxKit images -- [build command](../../build/) - Build LinuxKit images diff --git a/docs/commands.md b/docs/commands.md new file mode 100644 index 0000000..46e2022 --- /dev/null +++ b/docs/commands.md @@ -0,0 +1,177 @@ +--- +title: Commands +description: Path-based command registration and CLI execution. +--- + +# Commands + +Commands are one of the most AX-native parts of CoreGO. The path is the identity. + +## Register a Command + +```go +c.Command("deploy/to/homelab", core.Command{ + Action: func(opts core.Options) core.Result { + target := opts.String("target") + return core.Result{Value: "deploying to " + target, OK: true} + }, +}) +``` + +## Command Paths + +Paths must be clean: + +- no empty path +- no leading slash +- no trailing slash +- no double slash + +These paths are valid: + +```text +deploy +deploy/to/homelab +workspace/create +``` + +These are rejected: + +```text +/deploy +deploy/ +deploy//to +``` + +## Parent Commands Are Auto-Created + +When you register `deploy/to/homelab`, CoreGO also creates placeholder parents if they do not already exist: + +- `deploy` +- `deploy/to` + +This makes the path tree navigable without extra setup. + +## Read a Command Back + +```go +r := c.Command("deploy/to/homelab") +if r.OK { + cmd := r.Value.(*core.Command) + _ = cmd +} +``` + +## Run a Command Directly + +```go +cmd := c.Command("deploy/to/homelab").Value.(*core.Command) + +r := cmd.Run(core.Options{ + {Key: "target", Value: "uk-prod"}, +}) +``` + +If `Action` is nil, `Run` returns `Result{OK:false}` with a structured error. + +## Run Through the CLI Surface + +```go +r := c.Cli().Run("deploy", "to", "homelab", "--target=uk-prod", "--debug") +``` + +`Cli.Run` resolves the longest matching command path from the arguments, then converts the remaining args into `core.Options`. 
+ +## Flag Parsing Rules + +### Double Dash + +```text +--target=uk-prod -> key "target", value "uk-prod" +--debug -> key "debug", value true +``` + +### Single Dash + +```text +-v -> key "v", value true +-n=4 -> key "n", value "4" +``` + +### Positional Arguments + +Non-flag arguments after the command path are stored as repeated `_arg` options. + +```go +r := c.Cli().Run("workspace", "open", "alpha") +``` + +That produces an option like: + +```go +core.Option{Key: "_arg", Value: "alpha"} +``` + +### Important Details + +- flag values stay as strings +- `opts.Int("port")` only works if some code stored an actual `int` +- invalid flags such as `-verbose` and `--v` are ignored + +## Help Output + +`Cli.PrintHelp()` prints executable commands: + +```go +c.Cli().PrintHelp() +``` + +It skips: + +- hidden commands +- placeholder parents with no `Action` and no `Lifecycle` + +Descriptions are resolved through `cmd.I18nKey()`. + +## I18n Description Keys + +If `Description` is empty, CoreGO derives a key from the path. + +```text +deploy -> cmd.deploy.description +deploy/to/homelab -> cmd.deploy.to.homelab.description +workspace/create -> cmd.workspace.create.description +``` + +If `Description` is already set, CoreGO uses it as-is. + +## Lifecycle Commands + +Commands can also delegate to a lifecycle implementation. 
+ +```go +type daemonCommand struct{} + +func (d *daemonCommand) Start(opts core.Options) core.Result { return core.Result{OK: true} } +func (d *daemonCommand) Stop() core.Result { return core.Result{OK: true} } +func (d *daemonCommand) Restart() core.Result { return core.Result{OK: true} } +func (d *daemonCommand) Reload() core.Result { return core.Result{OK: true} } +func (d *daemonCommand) Signal(sig string) core.Result { return core.Result{Value: sig, OK: true} } + +c.Command("agent/serve", core.Command{ + Lifecycle: &daemonCommand{}, +}) +``` + +Important behavior: + +- `Start` falls back to `Run` when `Lifecycle` is nil +- `Stop`, `Restart`, `Reload`, and `Signal` return an empty `Result` when `Lifecycle` is nil + +## List Command Paths + +```go +paths := c.Commands() +``` + +Like the service registry, the command registry is map-backed, so iteration order is not guaranteed. diff --git a/docs/configuration.md b/docs/configuration.md index deabb68..0a0cf11 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -1,357 +1,96 @@ +--- +title: Configuration +description: Constructor options, runtime settings, and feature flags. +--- + # Configuration -Core uses `.core/` directory for project configuration. 
+CoreGO uses two different configuration layers: -## Directory Structure +- constructor-time `core.Options` +- runtime `c.Config()` -``` -.core/ -├── release.yaml # Release configuration -├── build.yaml # Build configuration (optional) -├── php.yaml # PHP configuration (optional) -└── linuxkit/ # LinuxKit templates - ├── server.yml - └── dev.yml +## Constructor-Time Options + +```go +c := core.New(core.Options{ + {Key: "name", Value: "agent-workbench"}, +}) ``` -## release.yaml +### Current Behavior -Full release configuration reference: +- `New` accepts `opts ...Options` +- the current implementation copies only the first `Options` slice +- the `name` key is applied to `c.App().Name` -```yaml -version: 1 +If you need more constructor data, put it in the first `core.Options` slice. -project: - name: myapp - repository: myorg/myapp +## Runtime Settings with `Config` -build: - targets: - - os: linux - arch: amd64 - - os: linux - arch: arm64 - - os: darwin - arch: amd64 - - os: darwin - arch: arm64 - - os: windows - arch: amd64 +Use `c.Config()` for mutable process settings. 
-publishers: - # GitHub Releases (required - others reference these artifacts) - - type: github - prerelease: false - draft: false - - # npm binary wrapper - - type: npm - package: "@myorg/myapp" - access: public # or "restricted" - - # Homebrew formula - - type: homebrew - tap: myorg/homebrew-tap - formula: myapp - official: - enabled: false - output: dist/homebrew - - # Scoop manifest (Windows) - - type: scoop - bucket: myorg/scoop-bucket - official: - enabled: false - output: dist/scoop - - # AUR (Arch Linux) - - type: aur - maintainer: "Name " - - # Chocolatey (Windows) - - type: chocolatey - push: false # true to publish - - # Docker multi-arch - - type: docker - registry: ghcr.io - image: myorg/myapp - dockerfile: Dockerfile - platforms: - - linux/amd64 - - linux/arm64 - tags: - - latest - - "{{.Version}}" - build_args: - VERSION: "{{.Version}}" - - # LinuxKit images - - type: linuxkit - config: .core/linuxkit/server.yml - formats: - - iso - - qcow2 - - docker - platforms: - - linux/amd64 - - linux/arm64 - -changelog: - include: - - feat - - fix - - perf - - refactor - exclude: - - chore - - docs - - style - - test - - ci +```go +c.Config().Set("workspace.root", "/srv/workspaces") +c.Config().Set("max_agents", 8) +c.Config().Set("debug", true) ``` -## build.yaml +Read them back with: -Optional build configuration: - -```yaml -version: 1 - -project: - name: myapp - binary: myapp - -build: - main: ./cmd/myapp - env: - CGO_ENABLED: "0" - flags: - - -trimpath - ldflags: - - -s -w - - -X main.version={{.Version}} - - -X main.commit={{.Commit}} - -targets: - - os: linux - arch: amd64 - - os: darwin - arch: arm64 +```go +root := c.Config().String("workspace.root") +maxAgents := c.Config().Int("max_agents") +debug := c.Config().Bool("debug") +raw := c.Config().Get("workspace.root") ``` -## php.yaml +### Important Details -PHP/Laravel configuration: +- missing keys return zero values +- typed accessors do not coerce strings into ints or bools +- `Get` returns 
`core.Result` -```yaml -version: 1 +## Feature Flags -dev: - domain: myapp.test - ssl: true - port: 8000 - services: - - frankenphp - - vite - - horizon - - reverb - - redis +`Config` also tracks named feature flags. -test: - parallel: true - coverage: false - -deploy: - coolify: - server: https://coolify.example.com - project: my-project - environment: production +```go +c.Config().Enable("workspace.templates") +c.Config().Enable("agent.review") +c.Config().Disable("agent.review") ``` -## LinuxKit Templates +Read them with: -LinuxKit YAML configuration: - -```yaml -kernel: - image: linuxkit/kernel:6.6 - cmdline: "console=tty0 console=ttyS0" - -init: - - linuxkit/init:latest - - linuxkit/runc:latest - - linuxkit/containerd:latest - - linuxkit/ca-certificates:latest - -onboot: - - name: sysctl - image: linuxkit/sysctl:latest - -services: - - name: dhcpcd - image: linuxkit/dhcpcd:latest - - name: sshd - image: linuxkit/sshd:latest - - name: myapp - image: myorg/myapp:latest - capabilities: - - CAP_NET_BIND_SERVICE - -files: - - path: /etc/myapp/config.yaml - contents: | - server: - port: 8080 +```go +enabled := c.Config().Enabled("workspace.templates") +features := c.Config().EnabledFeatures() ``` -## repos.yaml +Feature names are case-sensitive. -Package registry for multi-repo workspaces: +## `ConfigVar[T]` -```yaml -# Organisation name (used for GitHub URLs) -org: host-uk +Use `ConfigVar[T]` when you need a typed value that can also represent “set versus unset”. -# Base path for cloning (default: current directory) -base_path: . 
+```go +theme := core.NewConfigVar("amber") -# Default settings for all repos -defaults: - ci: github - license: EUPL-1.2 - branch: main +if theme.IsSet() { + fmt.Println(theme.Get()) +} -# Repository definitions -repos: - # Foundation packages (no dependencies) - core-php: - type: foundation - description: Foundation framework - - core-devops: - type: foundation - description: Development environment - clone: false # Skip during setup (already exists) - - # Module packages (depend on foundation) - core-tenant: - type: module - depends_on: [core-php] - description: Multi-tenancy module - - core-admin: - type: module - depends_on: [core-php, core-tenant] - description: Admin panel - - core-api: - type: module - depends_on: [core-php] - description: REST API framework - - # Product packages (user-facing applications) - core-bio: - type: product - depends_on: [core-php, core-tenant] - description: Link-in-bio product - domain: bio.host.uk.com - - core-social: - type: product - depends_on: [core-php, core-tenant] - description: Social scheduling - domain: social.host.uk.com - - # Templates - core-template: - type: template - description: Starter template for new projects +theme.Unset() ``` -### repos.yaml Fields +This is useful for package-local state where zero values are not enough to describe configuration presence. 
-| Field | Required | Description | -|-------|----------|-------------| -| `org` | Yes | GitHub organisation name | -| `base_path` | No | Directory for cloning (default: `.`) | -| `defaults` | No | Default settings applied to all repos | -| `repos` | Yes | Map of repository definitions | +## Recommended Pattern -### Repository Fields +Use the two layers for different jobs: -| Field | Required | Description | -|-------|----------|-------------| -| `type` | Yes | `foundation`, `module`, `product`, or `template` | -| `description` | No | Human-readable description | -| `depends_on` | No | List of package dependencies | -| `clone` | No | Set `false` to skip during setup | -| `domain` | No | Production domain (for products) | -| `branch` | No | Override default branch | +- put startup identity such as `name` into `core.Options` +- put mutable runtime values and feature switches into `c.Config()` -### Package Types - -| Type | Description | Dependencies | -|------|-------------|--------------| -| `foundation` | Core framework packages | None | -| `module` | Reusable modules | Foundation packages | -| `product` | User-facing applications | Foundation + modules | -| `template` | Starter templates | Any | - ---- - -## Environment Variables - -Complete reference of environment variables used by Core CLI. 
- -### Authentication - -| Variable | Used By | Description | -|----------|---------|-------------| -| `GITHUB_TOKEN` | `core ci`, `core dev` | GitHub API authentication | -| `ANTHROPIC_API_KEY` | `core ai`, `core dev claude` | Claude API key | -| `AGENTIC_TOKEN` | `core ai task*` | Agentic API authentication | -| `AGENTIC_BASE_URL` | `core ai task*` | Agentic API endpoint | - -### Publishing - -| Variable | Used By | Description | -|----------|---------|-------------| -| `NPM_TOKEN` | `core ci` (npm publisher) | npm registry auth token | -| `CHOCOLATEY_API_KEY` | `core ci` (chocolatey publisher) | Chocolatey API key | -| `DOCKER_USERNAME` | `core ci` (docker publisher) | Docker registry username | -| `DOCKER_PASSWORD` | `core ci` (docker publisher) | Docker registry password | - -### Deployment - -| Variable | Used By | Description | -|----------|---------|-------------| -| `COOLIFY_URL` | `core php deploy` | Coolify server URL | -| `COOLIFY_TOKEN` | `core php deploy` | Coolify API token | -| `COOLIFY_APP_ID` | `core php deploy` | Production application ID | -| `COOLIFY_STAGING_APP_ID` | `core php deploy --staging` | Staging application ID | - -### Build - -| Variable | Used By | Description | -|----------|---------|-------------| -| `CGO_ENABLED` | `core build`, `core go *` | Enable/disable CGO (default: 0) | -| `GOOS` | `core build` | Target operating system | -| `GOARCH` | `core build` | Target architecture | - -### Configuration Paths - -| Variable | Description | -|----------|-------------| -| `CORE_CONFIG` | Override config directory (default: `~/.core/`) | -| `CORE_REGISTRY` | Override repos.yaml path | - ---- - -## Defaults - -If no configuration exists, sensible defaults are used: - -- **Targets**: linux/amd64, linux/arm64, darwin/amd64, darwin/arm64, windows/amd64 -- **Publishers**: GitHub only -- **Changelog**: feat, fix, perf, refactor included +That keeps constructor intent separate from live process state. 
diff --git a/docs/errors.md b/docs/errors.md new file mode 100644 index 0000000..9b7d3f3 --- /dev/null +++ b/docs/errors.md @@ -0,0 +1,120 @@ +--- +title: Errors +description: Structured errors, logging helpers, and panic recovery. +--- + +# Errors + +CoreGO treats failures as structured operational data. + +Repository convention: use `E()` instead of `fmt.Errorf` for framework and service errors. + +## `Err` + +The structured error type is: + +```go +type Err struct { + Operation string + Message string + Cause error + Code string +} +``` + +## Create Errors + +### `E` + +```go +err := core.E("workspace.Load", "failed to read workspace manifest", cause) +``` + +### `Wrap` + +```go +err := core.Wrap(cause, "workspace.Load", "manifest parse failed") +``` + +### `WrapCode` + +```go +err := core.WrapCode(cause, "WORKSPACE_INVALID", "workspace.Load", "manifest parse failed") +``` + +### `NewCode` + +```go +err := core.NewCode("NOT_FOUND", "workspace not found") +``` + +## Inspect Errors + +```go +op := core.Operation(err) +code := core.ErrorCode(err) +msg := core.ErrorMessage(err) +root := core.Root(err) +stack := core.StackTrace(err) +pretty := core.FormatStackTrace(err) +``` + +These helpers keep the operational chain visible without extra type assertions. + +## Join and Standard Wrappers + +```go +combined := core.ErrorJoin(err1, err2) +same := core.Is(combined, err1) +``` + +`core.As` and `core.NewError` mirror the standard library for convenience. + +## Log-and-Return Helpers + +`Core` exposes two convenience wrappers: + +```go +r1 := c.LogError(err, "workspace.Load", "workspace load failed") +r2 := c.LogWarn(err, "workspace.Load", "workspace load degraded") +``` + +These log through the default logger and return `core.Result`. 
You can also use the underlying `ErrorLog` directly:

```go
r := c.Log().Error(err, "workspace.Load", "workspace load failed")
```

`Must` logs and then panics when the error is non-nil:

```go
c.Must(err, "workspace.Load", "workspace load failed")
```

## Panic Recovery

`ErrorPanic` handles process-safe panic capture.

```go
defer c.Error().Recover()
```

Run background work with recovery:

```go
c.Error().SafeGo(func() {
    panic("captured")
})
```

If `ErrorPanic` has a configured crash file path, it appends JSON crash reports and `Reports(n)` reads them back.

That crash file path is currently internal state on `ErrorPanic`, not a public constructor option on `core.New()`.

## Logging and Error Context

The logging subsystem automatically extracts `op` and logical stack information from structured errors when those values are present in the key-value list.

That makes errors created with `E`, `Wrap`, or `WrapCode` much easier to follow in logs. diff --git a/docs/examples/build-docker-go.yaml b/docs/examples/build-docker-go.yaml deleted file mode 100644 index 4542d7a..0000000 --- a/docs/examples/build-docker-go.yaml +++ /dev/null @@ -1,42 +0,0 @@ -# Example: Go + Docker Build Configuration -# Build Go binary then containerize - -version: 1 - -project: - name: myservice - binary: myservice - -# First: build Go binary -build: - main: ./cmd/myservice - env: - CGO_ENABLED: "0" - GOOS: linux - ldflags: - - -s -w - - -X main.version={{.Version}} - -targets: - - os: linux - arch: amd64 - - os: linux - arch: arm64 - -# Then: build Docker image with the binary -docker: - dockerfile: Dockerfile - registry: ghcr.io - image: myorg/myservice - platforms: - - linux/amd64 - - linux/arm64 - tags: - - latest - - "{{.Version}}" - -# Dockerfile should COPY the built binary: -# -# FROM alpine:latest -# COPY myservice /usr/local/bin/myservice -# ENTRYPOINT ["/usr/local/bin/myservice"] diff --git a/docs/examples/build-docker.yaml 
b/docs/examples/build-docker.yaml deleted file mode 100644 index 3cd641d..0000000 --- a/docs/examples/build-docker.yaml +++ /dev/null @@ -1,40 +0,0 @@ -# Example: Docker Build Configuration -# Multi-arch container image - -version: 1 - -project: - name: myservice - type: docker - -docker: - dockerfile: Dockerfile - context: . - registry: ghcr.io - image: myorg/myservice - - platforms: - - linux/amd64 - - linux/arm64 - - tags: - - latest - - "{{.Version}}" - - "{{.Version}}-alpine" - - build_args: - APP_VERSION: "{{.Version}}" - BUILD_DATE: "{{.Date}}" - - labels: - org.opencontainers.image.source: https://github.com/myorg/myservice - org.opencontainers.image.description: My Service - org.opencontainers.image.licenses: MIT - - # Optional: build stage target - target: production - - # Optional: cache settings - cache: - from: type=gha - to: type=gha,mode=max diff --git a/docs/examples/build-full.yaml b/docs/examples/build-full.yaml deleted file mode 100644 index bd4f35c..0000000 --- a/docs/examples/build-full.yaml +++ /dev/null @@ -1,121 +0,0 @@ -# Example: Full Build Configuration -# All available options - -version: 1 - -project: - name: myapp - binary: myapp - type: auto # auto, go, wails, docker, linuxkit, php - -build: - # Go build settings - main: ./cmd/myapp - - # Environment variables - env: - CGO_ENABLED: "0" - GOFLAGS: "-mod=readonly" - - # Build flags - flags: - - -trimpath - - -v - - # Linker flags - ldflags: - - -s -w - - -X main.version={{.Version}} - - -X main.commit={{.Commit}} - - -X main.date={{.Date}} - - -X main.builtBy=core - - # Build tags - tags: - - production - - netgo - -# Build targets -targets: - - os: linux - arch: amd64 - - os: linux - arch: arm64 - - os: linux - arch: "386" - - os: darwin - arch: amd64 - - os: darwin - arch: arm64 - - os: windows - arch: amd64 - - os: windows - arch: arm64 - - os: freebsd - arch: amd64 - -# Wails configuration (if type: wails) -wails: - frontend: ./frontend - install_cmd: install - build_cmd: build - 
dev_cmd: dev - -# Docker configuration (if type: docker or docker output enabled) -docker: - dockerfile: Dockerfile - context: . - registry: ghcr.io - image: myorg/myapp - platforms: - - linux/amd64 - - linux/arm64 - tags: - - latest - - "{{.Version}}" - build_args: - VERSION: "{{.Version}}" - labels: - org.opencontainers.image.source: https://github.com/myorg/myapp - target: production - cache: - from: type=gha - to: type=gha,mode=max - -# LinuxKit configuration (if type: linuxkit) -linuxkit: - config: .core/linuxkit/server.yml - formats: - - iso - - qcow2 - - docker - platforms: - - linux/amd64 - - linux/arm64 - -# Archive settings -archive: - format: tar.gz - format_windows: zip - name: "{{.Project}}-{{.Version}}-{{.OS}}-{{.Arch}}" - files: - - LICENSE - - README.md - - CHANGELOG.md - strip_parent: true - -# Checksum settings -checksum: - algorithm: sha256 - file: checksums.txt - -# Hooks -hooks: - pre_build: - - go generate ./... - - go mod tidy - post_build: - - echo "Build complete" - -# Output directory -output: dist diff --git a/docs/examples/build-go-cli.yaml b/docs/examples/build-go-cli.yaml deleted file mode 100644 index 22b21ca..0000000 --- a/docs/examples/build-go-cli.yaml +++ /dev/null @@ -1,39 +0,0 @@ -# Example: Go CLI Build Configuration -# Cross-platform CLI tool - -version: 1 - -project: - name: mycli - binary: mycli - -build: - main: ./cmd/mycli - env: - CGO_ENABLED: "0" - flags: - - -trimpath - ldflags: - - -s -w - - -X main.version={{.Version}} - - -X main.commit={{.Commit}} - - -X main.date={{.Date}} - -targets: - - os: linux - arch: amd64 - - os: linux - arch: arm64 - - os: darwin - arch: amd64 - - os: darwin - arch: arm64 - - os: windows - arch: amd64 - -archive: - format: tar.gz - format_windows: zip - files: - - LICENSE - - README.md diff --git a/docs/examples/build-go-library.yaml b/docs/examples/build-go-library.yaml deleted file mode 100644 index 63fe5fb..0000000 --- a/docs/examples/build-go-library.yaml +++ /dev/null @@ -1,23 +0,0 @@ 
-# Example: Go Library Build Configuration -# No binary output, just validation and testing - -version: 1 - -project: - name: mylib - type: library # No binary build - -build: - # Library-specific settings - env: - CGO_ENABLED: "0" - -# Test configuration -test: - race: true - cover: true - packages: - - ./... - -# No targets needed for library -# targets: [] diff --git a/docs/examples/build-go-wails.yaml b/docs/examples/build-go-wails.yaml deleted file mode 100644 index 8a952bc..0000000 --- a/docs/examples/build-go-wails.yaml +++ /dev/null @@ -1,46 +0,0 @@ -# Example: Wails Desktop App Build Configuration -# Cross-platform desktop application with web frontend - -version: 1 - -project: - name: myapp - binary: myapp - -build: - main: . - env: - CGO_ENABLED: "1" # Required for Wails - ldflags: - - -s -w - - -X main.version={{.Version}} - -# Wails-specific configuration -wails: - frontend: ./frontend - # Auto-detects: npm, pnpm, yarn, bun - install_cmd: install - build_cmd: build - -targets: - # Desktop platforms only - - os: darwin - arch: amd64 - - os: darwin - arch: arm64 - - os: windows - arch: amd64 - - os: linux - arch: amd64 - -# Platform-specific packaging -package: - darwin: - - dmg - - app - windows: - - nsis - - zip - linux: - - tar.gz - - appimage diff --git a/docs/examples/build-linuxkit.yaml b/docs/examples/build-linuxkit.yaml deleted file mode 100644 index 75ebb19..0000000 --- a/docs/examples/build-linuxkit.yaml +++ /dev/null @@ -1,33 +0,0 @@ -# Example: LinuxKit Build Configuration -# Immutable Linux images - -version: 1 - -project: - name: myserver - type: linuxkit - -linuxkit: - config: .core/linuxkit/server.yml - - formats: - - iso # Bootable ISO (BIOS/EFI) - - qcow2 # QEMU/KVM/Proxmox - - raw # Raw disk image - - vmdk # VMware - - docker # Docker-loadable tarball - - platforms: - - linux/amd64 - - linux/arm64 - - # Output naming - name: "{{.Project}}-{{.Version}}" - -# The linuxkit config file (.core/linuxkit/server.yml) defines: -# - kernel 
version -# - init system -# - services to run -# - files to include -# -# See linuxkit-server.yml example diff --git a/docs/examples/build-minimal.yaml b/docs/examples/build-minimal.yaml deleted file mode 100644 index 9801947..0000000 --- a/docs/examples/build-minimal.yaml +++ /dev/null @@ -1,7 +0,0 @@ -# Example: Minimal Build Configuration -# Auto-detects everything from project structure - -version: 1 - -project: - name: myapp diff --git a/docs/examples/build-multi-binary.yaml b/docs/examples/build-multi-binary.yaml deleted file mode 100644 index 563a357..0000000 --- a/docs/examples/build-multi-binary.yaml +++ /dev/null @@ -1,51 +0,0 @@ -# Example: Multi-Binary Build Configuration -# Multiple binaries from one repository - -version: 1 - -project: - name: mytools - -# Multiple build targets -builds: - - name: cli - binary: mytool - main: ./cmd/mytool - ldflags: - - -s -w - - -X main.version={{.Version}} - - - name: server - binary: myserver - main: ./cmd/server - ldflags: - - -s -w - - -X main.version={{.Version}} - - - name: worker - binary: myworker - main: ./cmd/worker - ldflags: - - -s -w - -# Shared settings -build: - env: - CGO_ENABLED: "0" - flags: - - -trimpath - -targets: - - os: linux - arch: amd64 - - os: linux - arch: arm64 - - os: darwin - arch: arm64 - -# Archive includes all binaries -archive: - format: tar.gz - files: - - LICENSE - - README.md diff --git a/docs/examples/build-php-laravel.yaml b/docs/examples/build-php-laravel.yaml deleted file mode 100644 index ae23cad..0000000 --- a/docs/examples/build-php-laravel.yaml +++ /dev/null @@ -1,50 +0,0 @@ -# Example: PHP/Laravel Build Configuration -# FrankenPHP container with Laravel app - -version: 1 - -project: - name: mylaravel - type: php - -php: - version: "8.4" - - # Composer settings - composer: - install_args: - - --no-dev - - --optimize-autoloader - - --no-interaction - - # Frontend build - frontend: - enabled: true - build_cmd: "npm run build" - - # Octane configuration - octane: - server: 
frankenphp - workers: auto - max_requests: 500 - -# Docker output -docker: - dockerfile: Dockerfile - registry: ghcr.io - image: myorg/mylaravel - platforms: - - linux/amd64 - - linux/arm64 - tags: - - latest - - "{{.Version}}" - build_args: - PHP_VERSION: "8.4" - -# Optional: LinuxKit for immutable deployment -linuxkit: - config: .core/linuxkit/server-php.yml - formats: - - qcow2 - - iso diff --git a/docs/examples/linuxkit-docker.yml b/docs/examples/linuxkit-docker.yml deleted file mode 100644 index 416ede1..0000000 --- a/docs/examples/linuxkit-docker.yml +++ /dev/null @@ -1,29 +0,0 @@ -# Example: LinuxKit Docker Format -# Build immutable container that loads with `docker load` - -kernel: - image: linuxkit/kernel:6.6 - cmdline: "console=tty0" - -init: - - linuxkit/init:latest - - linuxkit/runc:latest - - linuxkit/containerd:latest - -services: - - name: myservice - image: ghcr.io/myorg/myservice:latest - -# Use in release.yaml: -# -# publishers: -# - type: linuxkit -# config: .core/linuxkit/docker-format.yml -# formats: -# - docker # Outputs .docker.tar -# platforms: -# - linux/amd64 -# - linux/arm64 -# -# Load the image: -# docker load < linuxkit-v1.0.0-amd64.docker.tar diff --git a/docs/examples/linuxkit-server.yml b/docs/examples/linuxkit-server.yml deleted file mode 100644 index 7727ca6..0000000 --- a/docs/examples/linuxkit-server.yml +++ /dev/null @@ -1,51 +0,0 @@ -# Example: LinuxKit Server Configuration -# Minimal immutable Linux server with your application - -kernel: - image: linuxkit/kernel:6.6 - cmdline: "console=tty0 console=ttyS0" - -init: - - linuxkit/init:latest - - linuxkit/runc:latest - - linuxkit/containerd:latest - - linuxkit/ca-certificates:latest - -onboot: - - name: sysctl - image: linuxkit/sysctl:latest - - name: dhcpcd - image: linuxkit/dhcpcd:latest - -services: - # SSH for management - - name: sshd - image: linuxkit/sshd:latest - binds: - - /etc/ssh/authorized_keys:/root/.ssh/authorized_keys - - # Your application - - name: myapp - image: 
ghcr.io/myorg/myapp:latest - capabilities: - - CAP_NET_BIND_SERVICE - binds: - - /etc/myapp:/etc/myapp:ro - -files: - # SSH authorized keys - - path: /etc/ssh/authorized_keys - mode: "0600" - contents: | - ssh-ed25519 AAAA... your-key - - # Application config - - path: /etc/myapp/config.yaml - mode: "0644" - contents: | - server: - host: 0.0.0.0 - port: 8080 - database: - host: ${DB_HOST:-localhost} - port: ${DB_PORT:-5432} diff --git a/docs/examples/publish-all.yaml b/docs/examples/publish-all.yaml deleted file mode 100644 index a8f6b51..0000000 --- a/docs/examples/publish-all.yaml +++ /dev/null @@ -1,68 +0,0 @@ -# Example: All Publishers Combined -# Use in .core/release.yaml publishers array - -publishers: - # 1. GitHub - always first (others reference these artifacts) - - type: github - prerelease: false - draft: false - - # 2. npm - JavaScript ecosystem - - type: npm - package: "@myorg/mycli" - access: public - - # 3. Homebrew - macOS/Linux - - type: homebrew - tap: myorg/homebrew-tap - official: - enabled: true - output: dist/homebrew - - # 4. Scoop - Windows - - type: scoop - bucket: myorg/scoop-bucket - official: - enabled: true - output: dist/scoop - - # 5. AUR - Arch Linux - - type: aur - maintainer: "Your Name " - - # 6. Chocolatey - Windows enterprise - - type: chocolatey - push: false - - # 7. Docker - Containers - - type: docker - registry: ghcr.io - image: myorg/mycli - platforms: - - linux/amd64 - - linux/arm64 - tags: - - latest - - "{{.Version}}" - - # 8. 
LinuxKit - Immutable infrastructure - - type: linuxkit - config: .core/linuxkit/server.yml - formats: - - iso - - qcow2 - - docker - platforms: - - linux/amd64 - - linux/arm64 - -# Required environment variables: -# GITHUB_TOKEN - via gh CLI auth -# NPM_TOKEN - npm publish -# CHOCOLATEY_API_KEY - if push: true -# -# Required tools: -# gh - GitHub CLI -# npm - Node package manager -# docker - Docker with buildx -# linuxkit - LinuxKit CLI diff --git a/docs/examples/publish-aur.yaml b/docs/examples/publish-aur.yaml deleted file mode 100644 index 711c291..0000000 --- a/docs/examples/publish-aur.yaml +++ /dev/null @@ -1,28 +0,0 @@ -# Example: AUR Publisher (Arch Linux) -# PKGBUILD generation and AUR push - -type: aur - -# Package name (will be suffixed with -bin) -package: mycli - -# Maintainer info (required by AUR) -maintainer: "Your Name " - -# Generate files only (don't push to AUR) -official: - enabled: true - output: dist/aur - -# Environment: SSH key for aur.archlinux.org -# -# Usage after publish: -# yay -S mycli-bin -# # or -# paru -S mycli-bin -# -# Generated files: -# - PKGBUILD -# - .SRCINFO -# -# Supports both x86_64 and aarch64 diff --git a/docs/examples/publish-chocolatey.yaml b/docs/examples/publish-chocolatey.yaml deleted file mode 100644 index 358ab39..0000000 --- a/docs/examples/publish-chocolatey.yaml +++ /dev/null @@ -1,29 +0,0 @@ -# Example: Chocolatey Publisher (Windows) -# NuSpec package for Windows enterprise - -type: chocolatey - -# Package name -package: mycli - -# Push to Chocolatey community repo -push: false # Set true to auto-publish - -# Generate files only -official: - enabled: true - output: dist/chocolatey - -# Environment: CHOCOLATEY_API_KEY required if push: true -# -# Usage after publish: -# choco install mycli -# -# Generated files: -# - mycli.nuspec -# - tools/chocolateyinstall.ps1 -# -# Manual publish: -# cd dist/chocolatey -# choco pack -# choco push mycli.1.0.0.nupkg --source https://push.chocolatey.org/ diff --git 
a/docs/examples/publish-docker.yaml b/docs/examples/publish-docker.yaml deleted file mode 100644 index d3d9063..0000000 --- a/docs/examples/publish-docker.yaml +++ /dev/null @@ -1,38 +0,0 @@ -# Example: Docker Publisher -# Multi-arch container images - -type: docker - -# Registry (default: ghcr.io) -registry: ghcr.io - -# Image name -image: myorg/myapp - -# Dockerfile path (default: Dockerfile) -dockerfile: Dockerfile - -# Target platforms -platforms: - - linux/amd64 - - linux/arm64 - -# Image tags -tags: - - latest - - "{{.Version}}" - - "{{.Version}}-alpine" - -# Build arguments -build_args: - VERSION: "{{.Version}}" - BUILD_DATE: "{{.Date}}" - -# Environment: Docker login to registry -# -# For ghcr.io: -# echo $GITHUB_TOKEN | docker login ghcr.io -u USERNAME --password-stdin -# -# Usage after publish: -# docker pull ghcr.io/myorg/myapp:latest -# docker run ghcr.io/myorg/myapp:v1.0.0 diff --git a/docs/examples/publish-github.yaml b/docs/examples/publish-github.yaml deleted file mode 100644 index 0fa37b3..0000000 --- a/docs/examples/publish-github.yaml +++ /dev/null @@ -1,14 +0,0 @@ -# Example: GitHub Releases Publisher -# Foundation publisher - others reference these artifacts - -type: github - -# Release settings -prerelease: false -draft: false - -# Auto-detect from git tag, or override -# version: v1.0.0 - -# Auto-detect from git remote, or specify -# repository: myorg/myapp diff --git a/docs/examples/publish-homebrew.yaml b/docs/examples/publish-homebrew.yaml deleted file mode 100644 index c186068..0000000 --- a/docs/examples/publish-homebrew.yaml +++ /dev/null @@ -1,29 +0,0 @@ -# Example: Homebrew Publisher -# Formula generation and tap management - -type: homebrew - -# Your tap repository -tap: myorg/homebrew-tap - -# Formula name (defaults to project name) -formula: mycli - -# Generate files for official homebrew-core PR -official: - enabled: true - output: dist/homebrew - -# Environment: Uses gh CLI authentication -# -# Usage after publish: -# brew tap 
myorg/tap -# brew install mycli -# -# Or directly: -# brew install myorg/tap/mycli -# -# Generated formula includes: -# - Multi-platform support (macOS Intel/ARM, Linux) -# - SHA256 checksums from GitHub release -# - Version from git tag diff --git a/docs/examples/publish-linuxkit.yaml b/docs/examples/publish-linuxkit.yaml deleted file mode 100644 index c83fb2a..0000000 --- a/docs/examples/publish-linuxkit.yaml +++ /dev/null @@ -1,36 +0,0 @@ -# Example: LinuxKit Publisher -# Immutable Linux images uploaded to GitHub release - -type: linuxkit - -# LinuxKit YAML configuration -config: .core/linuxkit/server.yml - -# Output formats -formats: - - iso # Bootable ISO (bare metal, VMs) - - qcow2 # QEMU/KVM/Proxmox - - raw # Raw disk image - - vmdk # VMware - - docker # Docker-loadable tarball - -# Target platforms -platforms: - - linux/amd64 - - linux/arm64 - -# Environment: linuxkit CLI installed -# -# Artifacts uploaded to GitHub release: -# - myapp-v1.0.0-amd64.iso -# - myapp-v1.0.0-amd64.qcow2 -# - myapp-v1.0.0-amd64.docker.tar -# - myapp-v1.0.0-arm64.iso -# - ... 
-# -# Usage: -# # Boot ISO -# qemu-system-x86_64 -cdrom myapp-v1.0.0-amd64.iso -m 1024 -# -# # Load Docker image -# docker load < myapp-v1.0.0-amd64.docker.tar diff --git a/docs/examples/publish-npm.yaml b/docs/examples/publish-npm.yaml deleted file mode 100644 index a34a912..0000000 --- a/docs/examples/publish-npm.yaml +++ /dev/null @@ -1,21 +0,0 @@ -# Example: npm Publisher -# Binary wrapper pattern - downloads correct platform binary on install - -type: npm - -# Package name (scoped recommended) -package: "@myorg/mycli" - -# Access level -access: public # or "restricted" for private - -# Environment: NPM_TOKEN required -# -# Usage after publish: -# npm install -g @myorg/mycli -# npx @myorg/mycli --help -# -# The published package contains: -# - package.json -# - install.js (postinstall downloads binary) -# - bin/run.js (wrapper that executes binary) diff --git a/docs/examples/publish-scoop.yaml b/docs/examples/publish-scoop.yaml deleted file mode 100644 index a5c975a..0000000 --- a/docs/examples/publish-scoop.yaml +++ /dev/null @@ -1,23 +0,0 @@ -# Example: Scoop Publisher (Windows) -# JSON manifest for Windows package manager - -type: scoop - -# Your bucket repository -bucket: myorg/scoop-bucket - -# Generate files for official scoop-main PR -official: - enabled: true - output: dist/scoop - -# Environment: Uses gh CLI authentication -# -# Usage after publish: -# scoop bucket add myorg https://github.com/myorg/scoop-bucket -# scoop install mycli -# -# Generated manifest includes: -# - 64-bit and ARM64 Windows support -# - SHA256 checksums -# - Auto-update configuration diff --git a/docs/examples/release-full.yaml b/docs/examples/release-full.yaml deleted file mode 100644 index facc635..0000000 --- a/docs/examples/release-full.yaml +++ /dev/null @@ -1,98 +0,0 @@ -# Example: Full Release Configuration -# Complete configuration with all publishers - -version: 1 - -project: - name: core - repository: host-uk/core - -build: - targets: - - os: linux - arch: amd64 - - 
os: linux - arch: arm64 - - os: darwin - arch: amd64 - - os: darwin - arch: arm64 - - os: windows - arch: amd64 - - os: windows - arch: arm64 - -publishers: - # 1. GitHub Releases - always first, others reference these - - type: github - prerelease: false - draft: false - - # 2. npm - JavaScript ecosystem - - type: npm - package: "@host-uk/core" - access: public - - # 3. Homebrew - macOS/Linux - - type: homebrew - tap: host-uk/homebrew-tap - formula: core - # Generate files for official homebrew-core PR - official: - enabled: true - output: dist/homebrew - - # 4. Scoop - Windows - - type: scoop - bucket: host-uk/scoop-bucket - # Generate files for official scoop-main PR - official: - enabled: true - output: dist/scoop - - # 5. AUR - Arch Linux - - type: aur - maintainer: "Host UK " - - # 6. Chocolatey - Windows enterprise - - type: chocolatey - push: false # Manual review before push - - # 7. Docker - Container deployment - - type: docker - registry: ghcr.io - image: host-uk/core - dockerfile: Dockerfile - platforms: - - linux/amd64 - - linux/arm64 - tags: - - latest - - "{{.Version}}" - - "{{.Version}}-alpine" - - # 8. 
LinuxKit - Immutable infrastructure - - type: linuxkit - config: .core/linuxkit/core-server.yml - formats: - - iso # Bootable ISO for bare metal - - qcow2 # QEMU/KVM/Proxmox - - docker # Immutable container - platforms: - - linux/amd64 - - linux/arm64 - -changelog: - include: - - feat # New features - - fix # Bug fixes - - perf # Performance improvements - - refactor # Code refactoring - - security # Security fixes - exclude: - - chore - - docs - - style - - test - - ci - - build diff --git a/docs/examples/release-go-cli.yaml b/docs/examples/release-go-cli.yaml deleted file mode 100644 index 24fcec2..0000000 --- a/docs/examples/release-go-cli.yaml +++ /dev/null @@ -1,59 +0,0 @@ -# Example: Go CLI Release Configuration -# Publishes to GitHub, npm, Homebrew, Scoop, AUR, and Chocolatey - -version: 1 - -project: - name: mycli - repository: myorg/mycli - -build: - targets: - - os: linux - arch: amd64 - - os: linux - arch: arm64 - - os: darwin - arch: amd64 - - os: darwin - arch: arm64 - - os: windows - arch: amd64 - -publishers: - # GitHub Releases - foundation for all other publishers - - type: github - prerelease: false - draft: false - - # npm - binary wrapper pattern - # Users install via: npm install -g @myorg/mycli - - type: npm - package: "@myorg/mycli" - access: public - - # Homebrew - tap repository - # Users install via: brew install myorg/tap/mycli - - type: homebrew - tap: myorg/homebrew-tap - - # Scoop - Windows package manager - # Users install via: scoop bucket add myorg https://github.com/myorg/scoop-bucket && scoop install mycli - - type: scoop - bucket: myorg/scoop-bucket - - # AUR - Arch Linux User Repository - # Users install via: yay -S mycli-bin - - type: aur - maintainer: "Your Name " - - # Chocolatey - Windows enterprise - # Users install via: choco install mycli - - type: chocolatey - push: false # Set true to auto-publish - -changelog: - include: - - feat - - fix - - perf diff --git a/docs/examples/release-go-wails.yaml 
b/docs/examples/release-go-wails.yaml deleted file mode 100644 index c4d5eaf..0000000 --- a/docs/examples/release-go-wails.yaml +++ /dev/null @@ -1,36 +0,0 @@ -# Example: Wails Desktop App Release Configuration -# Builds cross-platform desktop app and publishes to GitHub - -version: 1 - -project: - name: myapp - repository: myorg/myapp - -build: - targets: - - os: darwin - arch: amd64 - - os: darwin - arch: arm64 - - os: windows - arch: amd64 - - os: linux - arch: amd64 - -publishers: - - type: github - prerelease: false - draft: true # Review before publishing - - # Homebrew cask for macOS - - type: homebrew - tap: myorg/homebrew-tap - formula: myapp - -changelog: - include: - - feat - - fix - - perf - - ui # Custom type for UI changes diff --git a/docs/examples/release-minimal.yaml b/docs/examples/release-minimal.yaml deleted file mode 100644 index 1f328d4..0000000 --- a/docs/examples/release-minimal.yaml +++ /dev/null @@ -1,11 +0,0 @@ -# Example: Minimal Release Configuration -# Just GitHub releases with defaults - -version: 1 - -project: - name: myapp - repository: myorg/myapp - -publishers: - - type: github diff --git a/docs/examples/release-official-repos.yaml b/docs/examples/release-official-repos.yaml deleted file mode 100644 index 1c36d7a..0000000 --- a/docs/examples/release-official-repos.yaml +++ /dev/null @@ -1,51 +0,0 @@ -# Example: Generate Files for Official Repository PRs -# Creates files for PRs to homebrew-core, scoop-main, etc. 
- -version: 1 - -project: - name: myapp - repository: myorg/myapp - -publishers: - - type: github - - # Generate formula for homebrew-core PR - # Output: dist/homebrew/myapp.rb - - type: homebrew - tap: myorg/homebrew-tap # Also push to own tap - official: - enabled: true - output: dist/homebrew - - # Generate manifest for scoop-main PR - # Output: dist/scoop/myapp.json - - type: scoop - bucket: myorg/scoop-bucket # Also push to own bucket - official: - enabled: true - output: dist/scoop - - # Generate files for AUR - # Output: dist/aur/PKGBUILD, dist/aur/.SRCINFO - - type: aur - maintainer: "Your Name " - official: - enabled: true - output: dist/aur - -# After release, submit PRs: -# -# Homebrew: -# cd homebrew-core -# cp ../myapp/dist/homebrew/myapp.rb Formula/m/myapp.rb -# git checkout -b myapp-1.0.0 -# git add . && git commit -m "myapp 1.0.0 (new formula)" -# gh pr create -# -# Scoop: -# cd Main -# cp ../myapp/dist/scoop/myapp.json bucket/myapp.json -# git checkout -b myapp-1.0.0 -# git add . 
&& git commit -m "myapp: Add version 1.0.0" -# gh pr create diff --git a/docs/examples/release-php-laravel.yaml b/docs/examples/release-php-laravel.yaml deleted file mode 100644 index 8ebdbae..0000000 --- a/docs/examples/release-php-laravel.yaml +++ /dev/null @@ -1,42 +0,0 @@ -# Example: PHP/Laravel Release Configuration -# Builds Docker container and LinuxKit image - -version: 1 - -project: - name: mylaravel - repository: myorg/mylaravel - -publishers: - - type: github - prerelease: false - - # Docker container for deployment - - type: docker - registry: ghcr.io - image: myorg/mylaravel - dockerfile: Dockerfile - platforms: - - linux/amd64 - - linux/arm64 - tags: - - latest - - "{{.Version}}" - build_args: - PHP_VERSION: "8.4" - APP_ENV: production - - # LinuxKit for immutable server deployment - - type: linuxkit - config: .core/linuxkit/server-php.yml - formats: - - iso - - qcow2 - platforms: - - linux/amd64 - -changelog: - include: - - feat - - fix - - security diff --git a/docs/examples/sdk-full.yaml b/docs/examples/sdk-full.yaml deleted file mode 100644 index 52e1f5a..0000000 --- a/docs/examples/sdk-full.yaml +++ /dev/null @@ -1,43 +0,0 @@ -# Example: Full SDK Configuration -# Generate typed API clients from OpenAPI specs - -sdk: - # OpenAPI spec source (auto-detected if omitted) - spec: api/openapi.yaml - - # Languages to generate - languages: - - typescript - - python - - go - - php - - # Output directory (default: sdk/) - output: sdk/ - - # Package naming - package: - name: myapi - version: "{{.Version}}" - - # Breaking change detection - diff: - enabled: true - fail_on_breaking: true # CI fails on breaking changes - - # Optional: publish to monorepo - publish: - repo: myorg/sdks - path: packages/myapi - -# Required tools (install one per language): -# TypeScript: npm i -g openapi-typescript-codegen (or Docker) -# Python: pip install openapi-python-client (or Docker) -# Go: go install github.com/oapi-codegen/oapi-codegen/v2/cmd/oapi-codegen@latest -# PHP: 
Docker required -# -# Usage: -# core sdk generate # Generate all configured languages -# core sdk generate --lang go # Generate single language -# core sdk diff --base v1.0.0 # Check for breaking changes -# core sdk validate # Validate spec diff --git a/docs/getting-started.md b/docs/getting-started.md index ad374ab..d2d8166 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -1,191 +1,208 @@ +--- +title: Getting Started +description: Build a first CoreGO application with the current API. +--- + # Getting Started -This guide walks you through installing Core and running your first build. +This page shows the shortest path to a useful CoreGO application using the API that exists in this repository today. -## Prerequisites - -Before installing Core, ensure you have: - -| Tool | Minimum Version | Check Command | -|------|-----------------|---------------| -| Go | 1.23+ | `go version` | -| Git | 2.30+ | `git --version` | - -Optional (for specific features): - -| Tool | Required For | Install | -|------|--------------|---------| -| `gh` | GitHub integration (`core dev issues`, `core dev reviews`) | [cli.github.com](https://cli.github.com) | -| Docker | Container builds | [docker.com](https://docker.com) | -| `task` | Task automation | `go install github.com/go-task/task/v3/cmd/task@latest` | - -## Installation - -### Option 1: Go Install (Recommended) +## Install ```bash -# Install latest release -go install github.com/host-uk/core/cmd/core@latest - -# Verify installation -core doctor +go get dappco.re/go/core ``` -If `core: command not found`, add Go's bin directory to your PATH: +## Create a Core -```bash -export PATH="$PATH:$(go env GOPATH)/bin" +`New` takes zero or more `core.Options` slices, but the current implementation only reads the first one. In practice, treat the constructor as `core.New(core.Options{...})`. 
+ +```go +package main + +import "dappco.re/go/core" + +func main() { + c := core.New(core.Options{ + {Key: "name", Value: "agent-workbench"}, + }) + + _ = c +} ``` -### Option 2: Download Binary +The `name` option is copied into `c.App().Name`. -Download pre-built binaries from [GitHub Releases](https://github.com/host-uk/core/releases): +## Register a Service -```bash -# macOS (Apple Silicon) -curl -Lo core https://github.com/host-uk/core/releases/latest/download/core-darwin-arm64 -chmod +x core -sudo mv core /usr/local/bin/ +Services are registered explicitly with a name and a `core.Service` DTO. -# macOS (Intel) -curl -Lo core https://github.com/host-uk/core/releases/latest/download/core-darwin-amd64 -chmod +x core -sudo mv core /usr/local/bin/ - -# Linux (x86_64) -curl -Lo core https://github.com/host-uk/core/releases/latest/download/core-linux-amd64 -chmod +x core -sudo mv core /usr/local/bin/ +```go +c.Service("audit", core.Service{ + OnStart: func() core.Result { + core.Info("audit service started", "app", c.App().Name) + return core.Result{OK: true} + }, + OnStop: func() core.Result { + core.Info("audit service stopped", "app", c.App().Name) + return core.Result{OK: true} + }, +}) ``` -### Option 3: Build from Source +This registry stores `core.Service` values. It is a lifecycle registry, not a typed object container. 
-```bash -# Clone repository -git clone https://github.com/host-uk/core.git -cd core +## Register a Query, Task, and Command -# Build with Task (recommended) -task cli:build -# Binary at ./bin/core +```go +type workspaceCountQuery struct{} -# Or build with Go directly -CGO_ENABLED=0 go build -o core ./cmd/core/ -sudo mv core /usr/local/bin/ +type createWorkspaceTask struct { + Name string +} + +c.RegisterQuery(func(_ *core.Core, q core.Query) core.Result { + switch q.(type) { + case workspaceCountQuery: + return core.Result{Value: 1, OK: true} + } + return core.Result{} +}) + +c.RegisterTask(func(_ *core.Core, t core.Task) core.Result { + switch task := t.(type) { + case createWorkspaceTask: + path := "/tmp/agent-workbench/" + task.Name + return core.Result{Value: path, OK: true} + } + return core.Result{} +}) + +c.Command("workspace/create", core.Command{ + Action: func(opts core.Options) core.Result { + return c.PERFORM(createWorkspaceTask{ + Name: opts.String("name"), + }) + }, +}) ``` -## Your First Build +## Start the Runtime -### 1. Navigate to a Go Project - -```bash -cd ~/Code/my-go-project +```go +if !c.ServiceStartup(context.Background(), nil).OK { + panic("startup failed") +} ``` -### 2. Initialise Configuration +`ServiceStartup` returns `core.Result`, not `error`. -```bash -core setup +## Run Through the CLI Surface + +```go +r := c.Cli().Run("workspace", "create", "--name=alpha") +if r.OK { + fmt.Println("created:", r.Value) +} ``` -This detects your project type and creates configuration files in `.core/`: -- `build.yaml` - Build settings -- `release.yaml` - Release configuration -- `test.yaml` - Test commands +For flags with values, the CLI stores the value as a string. `--name=alpha` becomes `opts.String("name") == "alpha"`. -### 3. 
Build +## Query the System -```bash -core build +```go +count := c.QUERY(workspaceCountQuery{}) +if count.OK { + fmt.Println("workspace count:", count.Value) +} ``` -Output appears in `dist/`: +## Shut Down Cleanly -``` -dist/ -├── my-project-darwin-arm64.tar.gz -├── my-project-linux-amd64.tar.gz -└── CHECKSUMS.txt +```go +_ = c.ServiceShutdown(context.Background()) ``` -### 4. Cross-Compile (Optional) +Shutdown cancels `c.Context()`, broadcasts `ActionServiceShutdown{}`, waits for background tasks to finish, and then runs service stop hooks. -```bash -core build --targets linux/amd64,linux/arm64,darwin/arm64,windows/amd64 -``` +## Full Example -## Your First Release +```go +package main -Releases are **safe by default** - Core runs in dry-run mode unless you explicitly confirm. +import ( + "context" + "fmt" -### 1. Preview + "dappco.re/go/core" +) -```bash -core ci -``` +type workspaceCountQuery struct{} -This shows what would be published without actually publishing. +type createWorkspaceTask struct { + Name string +} -### 2. Publish +func main() { + c := core.New(core.Options{ + {Key: "name", Value: "agent-workbench"}, + }) -```bash -core ci --we-are-go-for-launch -``` + c.Config().Set("workspace.root", "/tmp/agent-workbench") + c.Config().Enable("workspace.templates") -This creates a GitHub release with your built artifacts. 
+ c.Service("audit", core.Service{ + OnStart: func() core.Result { + core.Info("service started", "service", "audit") + return core.Result{OK: true} + }, + OnStop: func() core.Result { + core.Info("service stopped", "service", "audit") + return core.Result{OK: true} + }, + }) -## Multi-Repo Workflow + c.RegisterQuery(func(_ *core.Core, q core.Query) core.Result { + switch q.(type) { + case workspaceCountQuery: + return core.Result{Value: 1, OK: true} + } + return core.Result{} + }) -If you work with multiple repositories (like the host-uk ecosystem): + c.RegisterTask(func(_ *core.Core, t core.Task) core.Result { + switch task := t.(type) { + case createWorkspaceTask: + path := c.Config().String("workspace.root") + "/" + task.Name + return core.Result{Value: path, OK: true} + } + return core.Result{} + }) -### 1. Clone All Repositories + c.Command("workspace/create", core.Command{ + Action: func(opts core.Options) core.Result { + return c.PERFORM(createWorkspaceTask{ + Name: opts.String("name"), + }) + }, + }) -```bash -mkdir host-uk && cd host-uk -core setup -``` + if !c.ServiceStartup(context.Background(), nil).OK { + panic("startup failed") + } -Select packages in the interactive wizard. + created := c.Cli().Run("workspace", "create", "--name=alpha") + fmt.Println("created:", created.Value) -### 2. Check Status + count := c.QUERY(workspaceCountQuery{}) + fmt.Println("workspace count:", count.Value) -```bash -core dev health -# Output: "18 repos │ clean │ synced" -``` - -### 3. 
Work Across Repos - -```bash -core dev work --status # See status table -core dev work # Commit and push all dirty repos + _ = c.ServiceShutdown(context.Background()) +} ``` ## Next Steps -| Task | Command | Documentation | -|------|---------|---------------| -| Run tests | `core go test` | [go/test](cmd/go/test/) | -| Format code | `core go fmt --fix` | [go/fmt](cmd/go/fmt/) | -| Lint code | `core go lint` | [go/lint](cmd/go/lint/) | -| PHP development | `core php dev` | [php](cmd/php/) | -| View all commands | `core --help` | [cmd](cmd/) | - -## Getting Help - -```bash -# Check environment -core doctor - -# Command help -core --help - -# Full documentation -https://github.com/host-uk/core/tree/main/docs -``` - -## See Also - -- [Configuration](configuration.md) - All config options -- [Workflows](workflows.md) - Common task sequences -- [Troubleshooting](troubleshooting.md) - When things go wrong +- Read [primitives.md](primitives.md) next so the repeated shapes are clear. +- Read [commands.md](commands.md) if you are building a CLI-first system. +- Read [messaging.md](messaging.md) if services need to collaborate without direct imports. diff --git a/docs/glossary.md b/docs/glossary.md deleted file mode 100644 index ea9d280..0000000 --- a/docs/glossary.md +++ /dev/null @@ -1,112 +0,0 @@ -# Glossary - -Definitions of terms used throughout Core CLI documentation. - -## A - -### Artifact -A file produced by a build, typically a binary, archive, or checksum file. Artifacts are stored in the `dist/` directory and published during releases. - -## C - -### CGO -Go's mechanism for calling C code. Core disables CGO by default (`CGO_ENABLED=0`) to produce statically-linked binaries that don't depend on system libraries. - -### Changelog -Automatically generated list of changes between releases, created from conventional commit messages. Configure in `.core/release.yaml`. - -### Conventional Commits -A commit message format: `type(scope): description`. 
Types include `feat`, `fix`, `docs`, `chore`. Core uses this to generate changelogs. - -## D - -### Dry-run -A mode where commands show what they would do without actually doing it. `core ci` runs in dry-run mode by default for safety. - -## F - -### Foundation Package -A core package with no dependencies on other packages. Examples: `core-php`, `core-devops`. These form the base of the dependency tree. - -### FrankenPHP -A modern PHP application server used by `core php dev`. Combines PHP with Caddy for high-performance serving. - -## G - -### `gh` -The GitHub CLI tool. Required for commands that interact with GitHub: `core dev issues`, `core dev reviews`, `core dev ci`. - -## L - -### LinuxKit -A toolkit for building lightweight, immutable Linux distributions. Core can build LinuxKit images via `core build --type linuxkit`. - -## M - -### Module (Go) -A collection of Go packages with a `go.mod` file. Core's Go commands operate on modules. - -### Module (Package) -A host-uk package that depends on foundation packages. Examples: `core-tenant`, `core-admin`. Compare with **Foundation Package** and **Product**. - -## P - -### Package -An individual repository in the host-uk ecosystem. Packages are defined in `repos.yaml` and managed with `core pkg` commands. - -### Package Index -The `repos.yaml` file that lists all packages in a workspace. Contains metadata like dependencies, type, and description. - -### Product -A user-facing application package. Examples: `core-bio`, `core-social`. Products depend on foundation and module packages. - -### Publisher -A release target configured in `.core/release.yaml`. Types include `github`, `docker`, `npm`, `homebrew`, `linuxkit`. - -## R - -### Registry (Docker/npm) -A remote repository for container images or npm packages. Core can publish to registries during releases. - -### `repos.yaml` -The package index file defining all repositories in a workspace. Used by multi-repo commands like `core dev work`. 
- -## S - -### SDK -Software Development Kit. Core can generate API client SDKs from OpenAPI specs via `core build sdk`. - -## T - -### Target -A build target specified as `os/arch`, e.g., `linux/amd64`, `darwin/arm64`. Use `--targets` flag to specify. - -## W - -### Wails -A framework for building desktop applications with Go backends and web frontends. Core detects Wails projects and uses appropriate build commands. - -### Workspace (Go) -A Go 1.18+ feature for working with multiple modules simultaneously. Managed via `core go work` commands. - -### Workspace (Multi-repo) -A directory containing multiple packages from `repos.yaml`. Created via `core setup` and managed with `core dev` commands. - -## Symbols - -### `.core/` -Directory containing project configuration files: -- `build.yaml` - Build settings -- `release.yaml` - Release targets -- `test.yaml` - Test configuration -- `linuxkit/` - LinuxKit templates - -### `--we-are-go-for-launch` -Flag to disable dry-run mode and actually publish a release. Named as a deliberate friction to prevent accidental releases. - ---- - -## See Also - -- [Configuration](configuration.md) - Config file reference -- [Getting Started](getting-started.md) - First-time setup diff --git a/docs/index.md b/docs/index.md index 83f647e..0ec8647 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,98 +1,112 @@ -# Core CLI +--- +title: CoreGO +description: AX-first documentation for the CoreGO framework. +--- -Core is a unified CLI for the host-uk ecosystem - build, release, and deploy Go, Wails, PHP, and container workloads. +# CoreGO -## Installation +CoreGO is the foundation layer for the Core ecosystem. It gives you one container, one command tree, one message bus, and a small set of shared primitives that repeat across the whole framework. -```bash -# Via Go (recommended) -go install github.com/host-uk/core/cmd/core@latest +The current module path is `dappco.re/go/core`. 
-# Or download binary from releases -curl -Lo core https://github.com/host-uk/core/releases/latest/download/core-$(go env GOOS)-$(go env GOARCH) -chmod +x core && sudo mv core /usr/local/bin/ +## AX View -# Verify -core doctor +CoreGO already follows the main AX ideas from RFC-025: + +- predictable names such as `Core`, `Service`, `Command`, `Options`, `Result`, `Message` +- path-shaped command registration such as `deploy/to/homelab` +- one repeated input shape (`Options`) and one repeated return shape (`Result`) +- comments and examples that show real usage instead of restating the type signature + +## What CoreGO Owns + +| Surface | Purpose | +|---------|---------| +| `Core` | Central container and access point | +| `Service` | Managed lifecycle component | +| `Command` | Path-based command tree node | +| `ACTION`, `QUERY`, `PERFORM` | Decoupled communication between components | +| `Data`, `Drive`, `Fs`, `Config`, `I18n`, `Cli` | Built-in subsystems for common runtime work | +| `E`, `Wrap`, `ErrorLog`, `ErrorPanic` | Structured failures and panic recovery | + +## Quick Example + +```go +package main + +import ( + "context" + "fmt" + + "dappco.re/go/core" +) + +type flushCacheTask struct { + Name string +} + +func main() { + c := core.New(core.Options{ + {Key: "name", Value: "agent-workbench"}, + }) + + c.Service("cache", core.Service{ + OnStart: func() core.Result { + core.Info("cache ready", "app", c.App().Name) + return core.Result{OK: true} + }, + OnStop: func() core.Result { + core.Info("cache stopped", "app", c.App().Name) + return core.Result{OK: true} + }, + }) + + c.RegisterTask(func(_ *core.Core, task core.Task) core.Result { + switch task.(type) { + case flushCacheTask: + return core.Result{Value: "cache flushed", OK: true} + } + return core.Result{} + }) + + c.Command("cache/flush", core.Command{ + Action: func(opts core.Options) core.Result { + return c.PERFORM(flushCacheTask{Name: opts.String("name")}) + }, + }) + + if 
!c.ServiceStartup(context.Background(), nil).OK { + panic("startup failed") + } + + r := c.Cli().Run("cache", "flush", "--name=session-store") + fmt.Println(r.Value) + + _ = c.ServiceShutdown(context.Background()) +} ``` -See [Getting Started](getting-started.md) for all installation options including building from source. +## Documentation Paths -## Command Reference +| Path | Covers | +|------|--------| +| [getting-started.md](getting-started.md) | First runnable CoreGO app | +| [primitives.md](primitives.md) | `Options`, `Result`, `Service`, `Message`, `Query`, `Task` | +| [services.md](services.md) | Service registry, service locks, runtime helpers | +| [commands.md](commands.md) | Path-based commands and CLI execution | +| [messaging.md](messaging.md) | `ACTION`, `QUERY`, `QUERYALL`, `PERFORM`, `PerformAsync` | +| [lifecycle.md](lifecycle.md) | Startup, shutdown, context, background task draining | +| [configuration.md](configuration.md) | Constructor options, config state, feature flags | +| [subsystems.md](subsystems.md) | `App`, `Data`, `Drive`, `Fs`, `I18n`, `Cli` | +| [errors.md](errors.md) | Structured errors, logging helpers, panic recovery | +| [testing.md](testing.md) | Test naming and framework-level testing patterns | +| [pkg/core.md](pkg/core.md) | Package-level reference summary | +| [pkg/log.md](pkg/log.md) | Logging reference for the root package | +| [pkg/PACKAGE_STANDARDS.md](pkg/PACKAGE_STANDARDS.md) | AX package-authoring guidance | -See [cmd/](cmd/) for full command documentation. 
+## Good Reading Order -| Command | Description | -|---------|-------------| -| [go](cmd/go/) | Go development (test, fmt, lint, cov) | -| [php](cmd/php/) | Laravel/PHP development | -| [build](cmd/build/) | Build Go, Wails, Docker, LinuxKit projects | -| [ci](cmd/ci/) | Publish releases (dry-run by default) | -| [sdk](cmd/sdk/) | SDK generation and validation | -| [dev](cmd/dev/) | Multi-repo workflow + dev environment | -| [pkg](cmd/pkg/) | Package search and install | -| [vm](cmd/vm/) | LinuxKit VM management | -| [docs](cmd/docs/) | Documentation management | -| [setup](cmd/setup/) | Clone repos from registry | -| [doctor](cmd/doctor/) | Check development environment | - -## Quick Start - -```bash -# Go development -core go test # Run tests -core go test --coverage # With coverage -core go fmt # Format code -core go lint # Lint code - -# Build -core build # Auto-detect and build -core build --targets linux/amd64,darwin/arm64 - -# Release (dry-run by default) -core ci # Preview release -core ci --we-are-go-for-launch # Actually publish - -# Multi-repo workflow -core dev work # Status + commit + push -core dev work --status # Just show status - -# PHP development -core php dev # Start dev environment -core php test # Run tests -``` - -## Configuration - -Core uses `.core/` directory for project configuration: - -``` -.core/ -├── release.yaml # Release targets and settings -├── build.yaml # Build configuration (optional) -└── linuxkit/ # LinuxKit templates -``` - -And `repos.yaml` in workspace root for multi-repo management. 
- -## Guides - -- [Getting Started](getting-started.md) - Installation and first steps -- [Workflows](workflows.md) - Common task sequences -- [Troubleshooting](troubleshooting.md) - When things go wrong -- [Migration](migration.md) - Moving from legacy tools - -## Reference - -- [Configuration](configuration.md) - All config options -- [Glossary](glossary.md) - Term definitions - -## Claude Code Skill - -Install the skill to teach Claude Code how to use the Core CLI: - -```bash -curl -fsSL https://raw.githubusercontent.com/host-uk/core/main/.claude/skills/core/install.sh | bash -``` - -See [skill/](skill/) for details. +1. Start with [getting-started.md](getting-started.md). +2. Learn the repeated shapes in [primitives.md](primitives.md). +3. Pick the integration path you need next: [services.md](services.md), [commands.md](commands.md), or [messaging.md](messaging.md). +4. Use [subsystems.md](subsystems.md), [errors.md](errors.md), and [testing.md](testing.md) as reference pages while building. diff --git a/docs/lifecycle.md b/docs/lifecycle.md new file mode 100644 index 0000000..59ba644 --- /dev/null +++ b/docs/lifecycle.md @@ -0,0 +1,111 @@ +--- +title: Lifecycle +description: Startup, shutdown, context ownership, and background task draining. +--- + +# Lifecycle + +CoreGO manages lifecycle through `core.Service` callbacks, not through reflection or implicit interfaces. + +## Service Hooks + +```go +c.Service("cache", core.Service{ + OnStart: func() core.Result { + return core.Result{OK: true} + }, + OnStop: func() core.Result { + return core.Result{OK: true} + }, +}) +``` + +Only services with `OnStart` appear in `Startables()`. Only services with `OnStop` appear in `Stoppables()`. + +## `ServiceStartup` + +```go +r := c.ServiceStartup(context.Background(), nil) +``` + +### What It Does + +1. clears the shutdown flag +2. stores a new cancellable context on `c.Context()` +3. runs each `OnStart` +4. 
broadcasts `ActionServiceStartup{}` + +### Failure Behavior + +- if the input context is already cancelled, startup returns that error +- if any `OnStart` returns `OK:false`, startup stops immediately and returns that result + +## `ServiceShutdown` + +```go +r := c.ServiceShutdown(context.Background()) +``` + +### What It Does + +1. sets the shutdown flag +2. cancels `c.Context()` +3. broadcasts `ActionServiceShutdown{}` +4. waits for background tasks created by `PerformAsync` +5. runs each `OnStop` + +### Failure Behavior + +- if draining background tasks hits the shutdown context deadline, shutdown returns that context error +- when service stop hooks fail, CoreGO returns the first error it sees + +## Ordering + +The current implementation builds `Startables()` and `Stoppables()` by iterating over a map-backed registry. + +That means lifecycle order is not guaranteed today. + +If your application needs strict startup or shutdown ordering, orchestrate it explicitly inside a smaller number of service callbacks instead of relying on registry order. + +## `c.Context()` + +`ServiceStartup` creates the context returned by `c.Context()`. + +Use it for background work that should stop when the application shuts down: + +```go +c.Service("watcher", core.Service{ + OnStart: func() core.Result { + go func(ctx context.Context) { + <-ctx.Done() + }(c.Context()) + return core.Result{OK: true} + }, +}) +``` + +## Built-In Lifecycle Actions + +You can listen for lifecycle state changes through the action bus. + +```go +c.RegisterAction(func(_ *core.Core, msg core.Message) core.Result { + switch msg.(type) { + case core.ActionServiceStartup: + core.Info("core startup completed") + case core.ActionServiceShutdown: + core.Info("core shutdown started") + } + return core.Result{OK: true} +}) +``` + +## Background Task Draining + +`ServiceShutdown` waits for the internal task waitgroup to finish before calling stop hooks. 
+ +This is what makes `PerformAsync` safe for long-running work that should complete before teardown. + +## `OnReload` + +`Service` includes an `OnReload` callback field, but CoreGO does not currently expose a top-level lifecycle runner for reload operations. diff --git a/docs/messaging.md b/docs/messaging.md new file mode 100644 index 0000000..688893a --- /dev/null +++ b/docs/messaging.md @@ -0,0 +1,171 @@ +--- +title: Messaging +description: ACTION, QUERY, QUERYALL, PERFORM, and async task flow. +--- + +# Messaging + +CoreGO uses one message bus for broadcasts, lookups, and work dispatch. + +## Message Types + +```go +type Message any +type Query any +type Task any +``` + +Your own structs define the protocol. + +```go +type repositoryIndexed struct { + Name string +} + +type repositoryCountQuery struct{} + +type syncRepositoryTask struct { + Name string +} +``` + +## `ACTION` + +`ACTION` is a broadcast. + +```go +c.RegisterAction(func(_ *core.Core, msg core.Message) core.Result { + switch m := msg.(type) { + case repositoryIndexed: + core.Info("repository indexed", "name", m.Name) + return core.Result{OK: true} + } + return core.Result{OK: true} +}) + +r := c.ACTION(repositoryIndexed{Name: "core-go"}) +``` + +### Behavior + +- all registered action handlers are called in their current registration order +- if a handler returns `OK:false`, dispatch stops and that `Result` is returned +- if no handler fails, `ACTION` returns `Result{OK:true}` + +## `QUERY` + +`QUERY` is first-match request-response. 
+ +```go +c.RegisterQuery(func(_ *core.Core, q core.Query) core.Result { + switch q.(type) { + case repositoryCountQuery: + return core.Result{Value: 42, OK: true} + } + return core.Result{} +}) + +r := c.QUERY(repositoryCountQuery{}) +``` + +### Behavior + +- handlers run until one returns `OK:true` +- the first successful result wins +- if nothing handles the query, CoreGO returns an empty `Result` + +## `QUERYALL` + +`QUERYALL` collects every successful non-nil response. + +```go +r := c.QUERYALL(repositoryCountQuery{}) +results := r.Value.([]any) +``` + +### Behavior + +- every query handler is called +- only `OK:true` results with non-nil `Value` are collected +- the call itself returns `OK:true` even when the result list is empty + +## `PERFORM` + +`PERFORM` dispatches a task to the first handler that accepts it. + +```go +c.RegisterTask(func(_ *core.Core, t core.Task) core.Result { + switch task := t.(type) { + case syncRepositoryTask: + return core.Result{Value: "synced " + task.Name, OK: true} + } + return core.Result{} +}) + +r := c.PERFORM(syncRepositoryTask{Name: "core-go"}) +``` + +### Behavior + +- handlers run until one returns `OK:true` +- the first successful result wins +- if nothing handles the task, CoreGO returns an empty `Result` + +## `PerformAsync` + +`PerformAsync` runs a task in a background goroutine and returns a generated task identifier. 
+ +```go +r := c.PerformAsync(syncRepositoryTask{Name: "core-go"}) +taskID := r.Value.(string) +``` + +### Generated Events + +Async execution emits three action messages: + +| Message | When | +|---------|------| +| `ActionTaskStarted` | just before background execution begins | +| `ActionTaskProgress` | whenever `Progress` is called | +| `ActionTaskCompleted` | after the task finishes or panics | + +Example listener: + +```go +c.RegisterAction(func(_ *core.Core, msg core.Message) core.Result { + switch m := msg.(type) { + case core.ActionTaskCompleted: + core.Info("task completed", "task", m.TaskIdentifier, "err", m.Error) + } + return core.Result{OK: true} +}) +``` + +## Progress Updates + +```go +c.Progress(taskID, 0.5, "indexing commits", syncRepositoryTask{Name: "core-go"}) +``` + +That broadcasts `ActionTaskProgress`. + +## `TaskWithIdentifier` + +Tasks that implement `TaskWithIdentifier` receive the generated ID before dispatch. + +```go +type trackedTask struct { + ID string + Name string +} + +func (t *trackedTask) SetTaskIdentifier(id string) { t.ID = id } +func (t *trackedTask) GetTaskIdentifier() string { return t.ID } +``` + +## Shutdown Interaction + +When shutdown has started, `PerformAsync` returns an empty `Result` instead of scheduling more work. + +This is why `ServiceShutdown` can safely drain the outstanding background tasks before stopping services. diff --git a/docs/migration.md b/docs/migration.md deleted file mode 100644 index e5c4606..0000000 --- a/docs/migration.md +++ /dev/null @@ -1,233 +0,0 @@ -# Migration Guide - -Migrating from legacy scripts and tools to Core CLI. - -## From push-all.sh - -The `push-all.sh` script has been replaced by `core dev` commands. 
- -| Legacy | Core CLI | Notes | -|--------|----------|-------| -| `./push-all.sh --status` | `core dev work --status` | Status table | -| `./push-all.sh --commit` | `core dev commit` | Commit dirty repos | -| `./push-all.sh` | `core dev work` | Full workflow | - -### Quick Migration - -```bash -# Instead of -./push-all.sh --status - -# Use -core dev work --status -``` - -### New Features - -Core CLI adds features not available in the legacy script: - -```bash -# Quick health summary -core dev health -# Output: "18 repos │ clean │ synced" - -# Pull repos that are behind -core dev pull - -# GitHub integration -core dev issues # List open issues -core dev reviews # List PRs needing review -core dev ci # Check CI status - -# Dependency analysis -core dev impact core-php # What depends on core-php? -``` - ---- - -## From Raw Go Commands - -Core wraps Go commands with enhanced defaults and output. - -| Raw Command | Core CLI | Benefits | -|-------------|----------|----------| -| `go test ./...` | `core go test` | Filters warnings, sets CGO_ENABLED=0 | -| `go test -coverprofile=...` | `core go cov` | HTML reports, thresholds | -| `gofmt -w .` | `core go fmt --fix` | Uses goimports if available | -| `golangci-lint run` | `core go lint` | Consistent interface | -| `go build` | `core build` | Cross-compile, sign, archive | - -### Why Use Core? - -```bash -# Raw go test shows linker warnings on macOS -go test ./... -# ld: warning: -no_pie is deprecated... - -# Core filters noise -core go test -# PASS (clean output) -``` - -### Environment Setup - -Core automatically sets: -- `CGO_ENABLED=0` - Static binaries -- `MACOSX_DEPLOYMENT_TARGET=26.0` - Suppress macOS warnings -- Colour output for coverage reports - ---- - -## From Raw PHP Commands - -Core orchestrates Laravel development services. 
- -| Raw Command | Core CLI | Benefits | -|-------------|----------|----------| -| `php artisan serve` | `core php dev` | Adds Vite, Horizon, Reverb, Redis | -| `./vendor/bin/pest` | `core php test` | Auto-detects test runner | -| `./vendor/bin/pint` | `core php fmt --fix` | Consistent interface | -| Manual Coolify deploy | `core php deploy` | Tracked, scriptable | - -### Development Server Comparison - -```bash -# Raw: Start each service manually -php artisan serve & -npm run dev & -php artisan horizon & -php artisan reverb:start & - -# Core: One command -core php dev -# Starts all services, shows unified logs -``` - ---- - -## From goreleaser - -Core's release system is simpler than goreleaser for host-uk projects. - -| goreleaser | Core CLI | -|------------|----------| -| `.goreleaser.yaml` | `.core/release.yaml` | -| `goreleaser release --snapshot` | `core ci` (dry-run) | -| `goreleaser release` | `core ci --we-are-go-for-launch` | - -### Configuration Migration - -**goreleaser:** -```yaml -builds: - - main: ./cmd/app - goos: [linux, darwin, windows] - goarch: [amd64, arm64] - -archives: - - format: tar.gz - files: [LICENSE, README.md] - -release: - github: - owner: host-uk - name: app -``` - -**Core:** -```yaml -version: 1 - -project: - name: app - repository: host-uk/app - -targets: - - os: linux - arch: amd64 - - os: darwin - arch: arm64 - -publishers: - - type: github -``` - -### Key Differences - -1. **Separate build and release** - Core separates `core build` from `core ci` -2. **Safe by default** - `core ci` is dry-run unless `--we-are-go-for-launch` -3. **Simpler config** - Fewer options, sensible defaults - ---- - -## From Manual Git Operations - -Core automates multi-repo git workflows. 
- -| Manual | Core CLI | -|--------|----------| -| `cd repo1 && git status && cd ../repo2 && ...` | `core dev work --status` | -| Check each repo for uncommitted changes | `core dev health` | -| Commit each repo individually | `core dev commit` | -| Push each repo individually | `core dev push` | - -### Example: Committing Across Repos - -**Manual:** -```bash -cd core-php -git add -A -git commit -m "feat: add feature" -cd ../core-tenant -git add -A -git commit -m "feat: use new feature" -# ... repeat for each repo -``` - -**Core:** -```bash -core dev commit -# Interactive: reviews changes, suggests messages -# Adds Co-Authored-By automatically -``` - ---- - -## Deprecated Commands - -These commands have been removed or renamed: - -| Deprecated | Replacement | Version | -|------------|-------------|---------| -| `core sdk generate` | `core build sdk` | v0.5.0 | -| `core dev task*` | `core ai task*` | v0.8.0 | -| `core release` | `core ci` | v0.6.0 | - ---- - -## Version Compatibility - -| Core Version | Go Version | Breaking Changes | -|--------------|------------|------------------| -| v1.0.0+ | 1.23+ | Stable API | -| v0.8.0 | 1.22+ | Task commands moved to `ai` | -| v0.6.0 | 1.22+ | Release command renamed to `ci` | -| v0.5.0 | 1.21+ | SDK generation moved to `build sdk` | - ---- - -## Getting Help - -If you encounter issues during migration: - -1. Check [Troubleshooting](troubleshooting.md) -2. Run `core doctor` to verify setup -3. 
Use `--help` on any command: `core dev work --help` - ---- - -## See Also - -- [Getting Started](getting-started.md) - Fresh installation -- [Workflows](workflows.md) - Common task sequences -- [Configuration](configuration.md) - Config file reference diff --git a/docs/pkg/PACKAGE_STANDARDS.md b/docs/pkg/PACKAGE_STANDARDS.md index c9462fe..398bbf6 100644 --- a/docs/pkg/PACKAGE_STANDARDS.md +++ b/docs/pkg/PACKAGE_STANDARDS.md @@ -1,566 +1,138 @@ -# Core Package Standards +# AX Package Standards -This document defines the standards for creating packages in the Core framework. The `pkg/i18n` package is the reference implementation; all new packages should follow its patterns. +This page describes how to build packages on top of CoreGO in the style described by RFC-025. -## Package Structure +## 1. Prefer Predictable Names -A well-structured Core package follows this layout: +Use names that tell an agent what the thing is without translation. -``` -pkg/mypackage/ -├── types.go # Public types, constants, interfaces -├── service.go # Service struct with framework integration -├── mypackage.go # Global convenience functions -├── actions.go # ACTION messages for Core IPC (if needed) -├── hooks.go # Event hooks with atomic handlers (if needed) -├── [feature].go # Additional feature files -├── [feature]_test.go # Tests alongside implementation -└── service_test.go # Service tests -``` +Good: -## Core Principles +- `RepositoryService` +- `RepositoryServiceOptions` +- `WorkspaceCountQuery` +- `SyncRepositoryTask` -1. **Service-oriented**: Packages expose a `Service` struct that integrates with the Core framework -2. **Thread-safe**: All public APIs must be safe for concurrent use -3. **Global convenience**: Provide package-level functions that use a default service instance -4. **Options pattern**: Use functional options for configuration -5. 
**ACTION-based IPC**: Communicate via Core's ACTION system, not callbacks +Avoid shortening names unless the abbreviation is already universal. ---- +## 2. Put Real Usage in Comments -## Service Pattern +Write comments that show a real call with realistic values. -### Service Struct - -Embed `framework.ServiceRuntime[T]` for Core integration: +Good: ```go -// pkg/mypackage/service.go -package mypackage - -import ( - "sync" - "github.com/host-uk/core/pkg/framework" -) - -// Service provides mypackage functionality with Core integration. -type Service struct { - *framework.ServiceRuntime[Options] - - // Internal state (protected by mutex) - data map[string]any - mu sync.RWMutex -} - -// Options configures the service. -type Options struct { - // Document each option - BufferSize int - EnableFoo bool -} +// Sync a repository into the local workspace cache. +// svc.SyncRepository("core-go", "/srv/repos/core-go") ``` -### Service Factory +Avoid comments that only repeat the signature. -Create a factory function for Core registration: +## 3. Keep Paths Semantic + +If a command or template lives at a path, let the path explain the intent. + +Good: + +```text +deploy/to/homelab +workspace/create +template/workspace/go +``` + +That keeps the CLI, tests, docs, and message vocabulary aligned. + +## 4. Reuse CoreGO Primitives + +At Core boundaries, prefer the shared shapes: + +- `core.Options` for lightweight input +- `core.Result` for output +- `core.Service` for lifecycle registration +- `core.Message`, `core.Query`, `core.Task` for bus protocols + +Inside your package, typed structs are still good. Use `ServiceRuntime[T]` when you want typed package options plus a `Core` reference. ```go -// NewService creates a service factory for Core registration. 
-// -// core, _ := framework.New( -// framework.WithName("mypackage", mypackage.NewService(mypackage.Options{})), -// ) -func NewService(opts Options) func(*framework.Core) (any, error) { - return func(c *framework.Core) (any, error) { - // Apply defaults - if opts.BufferSize == 0 { - opts.BufferSize = DefaultBufferSize - } +type repositoryServiceOptions struct { + BaseDirectory string +} - svc := &Service{ - ServiceRuntime: framework.NewServiceRuntime(c, opts), - data: make(map[string]any), - } - return svc, nil - } +type repositoryService struct { + *core.ServiceRuntime[repositoryServiceOptions] } ``` -### Lifecycle Hooks +## 5. Prefer Explicit Registration -Implement `framework.Startable` and/or `framework.Stoppable`: +Register services and commands with names and paths that stay readable in grep results. ```go -// OnStartup implements framework.Startable. -func (s *Service) OnStartup(ctx context.Context) error { - // Register query/task handlers - s.Core().RegisterQuery(s.handleQuery) - s.Core().RegisterAction(s.handleAction) - return nil -} - -// OnShutdown implements framework.Stoppable. -func (s *Service) OnShutdown(ctx context.Context) error { - // Cleanup resources - return nil -} +c.Service("repository", core.Service{...}) +c.Command("repository/sync", core.Command{...}) ``` ---- +## 6. Use the Bus for Decoupling -## Global Default Pattern - -Following `pkg/i18n`, provide a global default service with atomic access: +When one package needs another package’s behavior, prefer queries and tasks over tight package coupling. ```go -// pkg/mypackage/mypackage.go -package mypackage - -import ( - "sync" - "sync/atomic" - - "github.com/host-uk/core/pkg/framework" -) - -// Global default service -var ( - defaultService atomic.Pointer[Service] - defaultOnce sync.Once - defaultErr error -) - -// Default returns the global service instance. -// Returns nil if not initialised. 
-func Default() *Service { - return defaultService.Load() -} - -// SetDefault sets the global service instance. -// Thread-safe. Panics if s is nil. -func SetDefault(s *Service) { - if s == nil { - panic("mypackage: SetDefault called with nil service") - } - defaultService.Store(s) -} - -// Init initialises the default service with a Core instance. -func Init(c *framework.Core) error { - defaultOnce.Do(func() { - factory := NewService(Options{}) - svc, err := factory(c) - if err != nil { - defaultErr = err - return - } - defaultService.Store(svc.(*Service)) - }) - return defaultErr +type repositoryCountQuery struct{} +type syncRepositoryTask struct { + Name string } ``` -### Global Convenience Functions +That keeps the protocol visible in code and easy for agents to follow. -Expose the most common operations at package level: +## 7. Use Structured Errors + +Use `core.E`, `core.Wrap`, and `core.WrapCode`. ```go -// ErrServiceNotInitialised is returned when the service is not initialised. -var ErrServiceNotInitialised = errors.New("mypackage: service not initialised") - -// DoSomething performs an operation using the default service. -func DoSomething(arg string) (string, error) { - svc := Default() - if svc == nil { - return "", ErrServiceNotInitialised - } - return svc.DoSomething(arg) +return core.Result{ + Value: core.E("repository.Sync", "git fetch failed", err), + OK: false, } ``` ---- +Do not introduce free-form `fmt.Errorf` chains in framework code. -## Options Pattern +## 8. Keep Testing Names Predictable -Use functional options for complex configuration: +Follow the repository pattern: + +- `_Good` +- `_Bad` +- `_Ugly` + +Example: ```go -// Option configures a Service during construction. -type Option func(*Service) - -// WithBufferSize sets the buffer size. -func WithBufferSize(size int) Option { - return func(s *Service) { - s.bufSize = size - } -} - -// WithFoo enables foo feature. 
-func WithFoo(enabled bool) Option { - return func(s *Service) { - s.fooEnabled = enabled - } -} - -// New creates a service with options. -func New(opts ...Option) (*Service, error) { - s := &Service{ - bufSize: DefaultBufferSize, - } - for _, opt := range opts { - opt(s) - } - return s, nil -} +func TestRepositorySync_Good(t *testing.T) {} +func TestRepositorySync_Bad(t *testing.T) {} +func TestRepositorySync_Ugly(t *testing.T) {} ``` ---- +## 9. Prefer Stable Shapes Over Clever APIs -## ACTION Messages (IPC) +For package APIs, avoid patterns that force an agent to infer too much hidden control flow. -For services that need to communicate events, define ACTION message types: +Prefer: -```go -// pkg/mypackage/actions.go -package mypackage +- clear structs +- explicit names +- path-based commands +- visible message types -import "time" +Avoid: -// ActionItemCreated is broadcast when an item is created. -type ActionItemCreated struct { - ID string - Name string - CreatedAt time.Time -} +- implicit global state unless it is truly a default service +- panic-hiding constructors +- dense option chains when a small explicit struct would do -// ActionItemUpdated is broadcast when an item changes. -type ActionItemUpdated struct { - ID string - Changes map[string]any -} +## 10. Document the Current Reality -// ActionItemDeleted is broadcast when an item is removed. -type ActionItemDeleted struct { - ID string -} -``` +If the implementation is in transition, document what the code does now, not the API shape you plan to have later. -Dispatch actions via `s.Core().ACTION()`: - -```go -func (s *Service) CreateItem(name string) (*Item, error) { - item := &Item{ID: generateID(), Name: name} - - // Store item... 
- - // Broadcast to listeners - s.Core().ACTION(ActionItemCreated{ - ID: item.ID, - Name: item.Name, - CreatedAt: time.Now(), - }) - - return item, nil -} -``` - -Consumers register handlers: - -```go -core.RegisterAction(func(c *framework.Core, msg framework.Message) error { - switch m := msg.(type) { - case mypackage.ActionItemCreated: - log.Printf("Item created: %s", m.Name) - case mypackage.ActionItemDeleted: - log.Printf("Item deleted: %s", m.ID) - } - return nil -}) -``` - ---- - -## Hooks Pattern - -For user-customisable behaviour, use atomic handlers (see `pkg/i18n/hooks.go`): - -```go -// pkg/mypackage/hooks.go -package mypackage - -import ( - "sync/atomic" -) - -// ErrorHandler is called when an error occurs. -type ErrorHandler func(err error) - -var errorHandler atomic.Value // stores ErrorHandler - -// OnError registers an error handler. -// Thread-safe. Pass nil to clear. -func OnError(h ErrorHandler) { - if h == nil { - errorHandler.Store((ErrorHandler)(nil)) - return - } - errorHandler.Store(h) -} - -// dispatchError calls the registered error handler. 
-func dispatchError(err error) { - v := errorHandler.Load() - if v == nil { - return - } - h, ok := v.(ErrorHandler) - if !ok || h == nil { - return - } - h(err) -} -``` - ---- - -## Thread Safety - -### Mutex Patterns - -Use `sync.RWMutex` for state that is read more than written: - -```go -type Service struct { - data map[string]any - mu sync.RWMutex -} - -func (s *Service) Get(key string) (any, bool) { - s.mu.RLock() - defer s.mu.RUnlock() - v, ok := s.data[key] - return v, ok -} - -func (s *Service) Set(key string, value any) { - s.mu.Lock() - defer s.mu.Unlock() - s.data[key] = value -} -``` - -### Atomic Values - -Use `atomic.Pointer[T]` for single values accessed frequently: - -```go -var config atomic.Pointer[Config] - -func GetConfig() *Config { - return config.Load() -} - -func SetConfig(c *Config) { - config.Store(c) -} -``` - ---- - -## Error Handling - -### Error Types - -Define package-level errors: - -```go -// Errors -var ( - ErrNotFound = errors.New("mypackage: not found") - ErrInvalidArg = errors.New("mypackage: invalid argument") - ErrNotRunning = errors.New("mypackage: not running") -) -``` - -### Wrapped Errors - -Use `fmt.Errorf` with `%w` for context: - -```go -func (s *Service) Load(path string) error { - data, err := os.ReadFile(path) - if err != nil { - return fmt.Errorf("failed to load config: %w", err) - } - // ... 
-} -``` - -### Error Struct (optional) - -For errors needing additional context: - -```go -type ServiceError struct { - Op string // Operation that failed - Path string // Resource path - Err error // Underlying error -} - -func (e *ServiceError) Error() string { - return fmt.Sprintf("%s %s: %v", e.Op, e.Path, e.Err) -} - -func (e *ServiceError) Unwrap() error { - return e.Err -} -``` - ---- - -## Testing - -### Test File Organisation - -Place tests alongside implementation: - -``` -mypackage.go → mypackage_test.go -service.go → service_test.go -buffer.go → buffer_test.go -``` - -### Test Helpers - -Create helpers for common setup: - -```go -func newTestService(t *testing.T) (*Service, *framework.Core) { - t.Helper() - - core, err := framework.New( - framework.WithName("mypackage", NewService(Options{})), - ) - require.NoError(t, err) - - svc, err := framework.ServiceFor[*Service](core, "mypackage") - require.NoError(t, err) - - return svc, core -} -``` - -### Test Naming Convention - -Use descriptive subtests: - -```go -func TestService_DoSomething(t *testing.T) { - t.Run("valid input", func(t *testing.T) { - // ... - }) - - t.Run("empty input returns error", func(t *testing.T) { - // ... - }) - - t.Run("concurrent access", func(t *testing.T) { - // ... 
- }) -} -``` - -### Testing Actions - -Verify ACTION broadcasts: - -```go -func TestService_BroadcastsActions(t *testing.T) { - core, _ := framework.New( - framework.WithName("mypackage", NewService(Options{})), - ) - - var received []ActionItemCreated - var mu sync.Mutex - - core.RegisterAction(func(c *framework.Core, msg framework.Message) error { - if m, ok := msg.(ActionItemCreated); ok { - mu.Lock() - received = append(received, m) - mu.Unlock() - } - return nil - }) - - svc, _ := framework.ServiceFor[*Service](core, "mypackage") - svc.CreateItem("test") - - mu.Lock() - assert.Len(t, received, 1) - assert.Equal(t, "test", received[0].Name) - mu.Unlock() -} -``` - ---- - -## Documentation - -### Package Doc - -Every package needs a doc comment in the main file: - -```go -// Package mypackage provides functionality for X. -// -// # Getting Started -// -// svc, err := mypackage.New() -// result := svc.DoSomething("input") -// -// # Core Integration -// -// core, _ := framework.New( -// framework.WithName("mypackage", mypackage.NewService(mypackage.Options{})), -// ) -package mypackage -``` - -### Function Documentation - -Document public functions with examples: - -```go -// DoSomething performs X operation with the given input. -// Returns ErrInvalidArg if input is empty. -// -// result, err := svc.DoSomething("hello") -// if err != nil { -// return err -// } -func (s *Service) DoSomething(input string) (string, error) { - // ... 
-} -``` - ---- - -## Checklist - -When creating a new package, ensure: - -- [ ] `Service` struct embeds `framework.ServiceRuntime[Options]` -- [ ] `NewService()` factory function for Core registration -- [ ] `Default()` / `SetDefault()` with `atomic.Pointer` -- [ ] Package-level convenience functions -- [ ] Thread-safe public APIs (mutex or atomic) -- [ ] ACTION messages for events (if applicable) -- [ ] Hooks with atomic handlers (if applicable) -- [ ] Comprehensive tests with helpers -- [ ] Package documentation with examples - -## Reference Implementations - -- **`pkg/i18n`** - Full reference with handlers, modes, hooks, grammar -- **`pkg/process`** - Simpler example with ACTION events and runner orchestration -- **`pkg/cli`** - Service integration with runtime lifecycle +That keeps agents correct on first pass, which is the real AX metric. diff --git a/docs/pkg/core.md b/docs/pkg/core.md new file mode 100644 index 0000000..88bd18b --- /dev/null +++ b/docs/pkg/core.md @@ -0,0 +1,81 @@ +# Package Reference: `core` + +Import path: + +```go +import "dappco.re/go/core" +``` + +This repository exposes one root package. 
The main areas are: + +## Constructors and Accessors + +| Name | Purpose | +|------|---------| +| `New` | Create a `*Core` | +| `NewRuntime` | Create an empty runtime wrapper | +| `NewWithFactories` | Create a runtime wrapper from named service factories | +| `Options`, `App`, `Data`, `Drive`, `Fs`, `Config`, `Error`, `Log`, `Cli`, `IPC`, `I18n`, `Context` | Access the built-in subsystems | + +## Core Primitives + +| Name | Purpose | +|------|---------| +| `Option`, `Options` | Input configuration and metadata | +| `Result` | Shared output shape | +| `Service` | Lifecycle DTO | +| `Command` | Command tree node | +| `Message`, `Query`, `Task` | Message bus payload types | + +## Service and Runtime APIs + +| Name | Purpose | +|------|---------| +| `Service` | Register or read a named service | +| `Services` | List registered service names | +| `Startables`, `Stoppables` | Snapshot lifecycle-capable services | +| `LockEnable`, `LockApply` | Activate the service registry lock | +| `ServiceRuntime[T]` | Helper for package authors | + +## Command and CLI APIs + +| Name | Purpose | +|------|---------| +| `Command` | Register or read a command by path | +| `Commands` | List command paths | +| `Cli().Run` | Resolve arguments to a command and execute it | +| `Cli().PrintHelp` | Show executable commands | + +## Messaging APIs + +| Name | Purpose | +|------|---------| +| `ACTION`, `Action` | Broadcast a message | +| `QUERY`, `Query` | Return the first successful query result | +| `QUERYALL`, `QueryAll` | Collect all successful query results | +| `PERFORM`, `Perform` | Run the first task handler that accepts the task | +| `PerformAsync` | Run a task in the background | +| `Progress` | Broadcast async task progress | +| `RegisterAction`, `RegisterActions`, `RegisterQuery`, `RegisterTask` | Register bus handlers | + +## Subsystems + +| Name | Purpose | +|------|---------| +| `Config` | Runtime settings and feature flags | +| `Data` | Embedded filesystem mounts | +| `Drive` | 
Named transport handles | +| `Fs` | Local filesystem operations | +| `I18n` | Locale collection and translation delegation | +| `App`, `Find` | Application identity and executable lookup | + +## Errors and Logging + +| Name | Purpose | +|------|---------| +| `E`, `Wrap`, `WrapCode`, `NewCode` | Structured error creation | +| `Operation`, `ErrorCode`, `ErrorMessage`, `Root`, `StackTrace`, `FormatStackTrace` | Error inspection | +| `NewLog`, `Default`, `SetDefault`, `SetLevel`, `SetRedactKeys` | Logger creation and defaults | +| `LogErr`, `LogPanic`, `ErrorLog`, `ErrorPanic` | Error-aware logging and panic recovery | + +Use the top-level docs in `docs/` for task-oriented guidance, then use this page as a compact reference. diff --git a/docs/pkg/i18n/EXTENDING.md b/docs/pkg/i18n/EXTENDING.md deleted file mode 100644 index b979b0a..0000000 --- a/docs/pkg/i18n/EXTENDING.md +++ /dev/null @@ -1,399 +0,0 @@ -# Extending the i18n Package - -This guide covers how to extend the i18n package with custom loaders, handlers, and integrations. 
- -## Custom Loaders - -The `Loader` interface allows loading translations from any source: - -```go -type Loader interface { - Load(lang string) (map[string]Message, *GrammarData, error) - Languages() []string -} -``` - -### Database Loader Example - -```go -type PostgresLoader struct { - db *sql.DB -} - -func (l *PostgresLoader) Languages() []string { - rows, err := l.db.Query("SELECT DISTINCT lang FROM translations") - if err != nil { - return nil - } - defer rows.Close() - - var langs []string - for rows.Next() { - var lang string - rows.Scan(&lang) - langs = append(langs, lang) - } - return langs -} - -func (l *PostgresLoader) Load(lang string) (map[string]i18n.Message, *i18n.GrammarData, error) { - rows, err := l.db.Query( - "SELECT key, text, plural_one, plural_other FROM translations WHERE lang = $1", - lang, - ) - if err != nil { - return nil, nil, err - } - defer rows.Close() - - messages := make(map[string]i18n.Message) - for rows.Next() { - var key, text string - var one, other sql.NullString - rows.Scan(&key, &text, &one, &other) - - if one.Valid || other.Valid { - messages[key] = i18n.Message{One: one.String, Other: other.String} - } else { - messages[key] = i18n.Message{Text: text} - } - } - - return messages, nil, nil -} - -// Usage -svc, err := i18n.NewWithLoader(&PostgresLoader{db: db}) -``` - -### Remote API Loader Example - -```go -type APILoader struct { - baseURL string - client *http.Client -} - -func (l *APILoader) Languages() []string { - resp, _ := l.client.Get(l.baseURL + "/languages") - defer resp.Body.Close() - - var langs []string - json.NewDecoder(resp.Body).Decode(&langs) - return langs -} - -func (l *APILoader) Load(lang string) (map[string]i18n.Message, *i18n.GrammarData, error) { - resp, err := l.client.Get(l.baseURL + "/translations/" + lang) - if err != nil { - return nil, nil, err - } - defer resp.Body.Close() - - var data struct { - Messages map[string]i18n.Message `json:"messages"` - Grammar *i18n.GrammarData `json:"grammar"` 
- } - json.NewDecoder(resp.Body).Decode(&data) - - return data.Messages, data.Grammar, nil -} -``` - -### Multi-Source Loader - -Combine multiple loaders with fallback: - -```go -type FallbackLoader struct { - primary i18n.Loader - secondary i18n.Loader -} - -func (l *FallbackLoader) Languages() []string { - // Merge languages from both sources - langs := make(map[string]bool) - for _, lang := range l.primary.Languages() { - langs[lang] = true - } - for _, lang := range l.secondary.Languages() { - langs[lang] = true - } - - result := make([]string, 0, len(langs)) - for lang := range langs { - result = append(result, lang) - } - return result -} - -func (l *FallbackLoader) Load(lang string) (map[string]i18n.Message, *i18n.GrammarData, error) { - msgs, grammar, err := l.primary.Load(lang) - if err != nil { - return l.secondary.Load(lang) - } - - // Merge with secondary for missing keys - secondary, secGrammar, _ := l.secondary.Load(lang) - for k, v := range secondary { - if _, exists := msgs[k]; !exists { - msgs[k] = v - } - } - - if grammar == nil { - grammar = secGrammar - } - - return msgs, grammar, nil -} -``` - -## Custom Handlers - -Handlers process keys before standard lookup. Use for dynamic patterns. 
- -### Handler Interface - -```go -type KeyHandler interface { - Match(key string) bool - Handle(key string, args []any, next func() string) string -} -``` - -### Emoji Handler Example - -```go -type EmojiHandler struct{} - -func (h EmojiHandler) Match(key string) bool { - return strings.HasPrefix(key, "emoji.") -} - -func (h EmojiHandler) Handle(key string, args []any, next func() string) string { - name := strings.TrimPrefix(key, "emoji.") - emojis := map[string]string{ - "success": "✅", - "error": "❌", - "warning": "⚠️", - "info": "ℹ️", - } - if emoji, ok := emojis[name]; ok { - return emoji - } - return next() // Delegate to next handler -} - -// Usage -i18n.AddHandler(EmojiHandler{}) -i18n.T("emoji.success") // "✅" -``` - -### Conditional Handler Example - -```go -type FeatureFlagHandler struct { - flags map[string]bool -} - -func (h FeatureFlagHandler) Match(key string) bool { - return strings.HasPrefix(key, "feature.") -} - -func (h FeatureFlagHandler) Handle(key string, args []any, next func() string) string { - feature := strings.TrimPrefix(key, "feature.") - parts := strings.SplitN(feature, ".", 2) - - if len(parts) < 2 { - return next() - } - - flag, subkey := parts[0], parts[1] - if h.flags[flag] { - // Feature enabled - translate the subkey - return i18n.T(subkey, args...) - } - - // Feature disabled - return empty or fallback - return "" -} -``` - -### Handler Chain Priority - -```go -// Prepend for highest priority (runs first) -svc.PrependHandler(CriticalHandler{}) - -// Append for lower priority (runs after defaults) -svc.AddHandler(FallbackHandler{}) - -// Clear all handlers -svc.ClearHandlers() - -// Add back defaults -svc.AddHandler(i18n.DefaultHandlers()...) 
-``` - -## Integrating with Frameworks - -### Cobra CLI - -```go -func init() { - // Initialise i18n before command setup - if err := i18n.Init(); err != nil { - log.Fatal(err) - } -} - -var rootCmd = &cobra.Command{ - Use: "myapp", - Short: i18n.T("cmd.root.short"), - Long: i18n.T("cmd.root.long"), -} - -var buildCmd = &cobra.Command{ - Use: "build", - Short: i18n.T("cmd.build.short"), - RunE: func(cmd *cobra.Command, args []string) error { - fmt.Println(i18n.T("i18n.progress.build")) - // ... - fmt.Println(i18n.T("i18n.done.build", "project")) - return nil - }, -} -``` - -### Error Messages - -```go -type LocalisedError struct { - Key string - Args map[string]any -} - -func (e LocalisedError) Error() string { - return i18n.T(e.Key, e.Args) -} - -// Usage -return LocalisedError{ - Key: "error.file_not_found", - Args: map[string]any{"Name": filename}, -} -``` - -### Structured Logging - -```go -func LogInfo(key string, args ...any) { - msg := i18n.T(key, args...) - slog.Info(msg, "i18n_key", key) -} - -func LogError(key string, err error, args ...any) { - msg := i18n.T(key, args...) 
- slog.Error(msg, "i18n_key", key, "error", err) -} -``` - -## Testing - -### Mock Loader for Tests - -```go -type MockLoader struct { - messages map[string]map[string]i18n.Message -} - -func (l *MockLoader) Languages() []string { - langs := make([]string, 0, len(l.messages)) - for lang := range l.messages { - langs = append(langs, lang) - } - return langs -} - -func (l *MockLoader) Load(lang string) (map[string]i18n.Message, *i18n.GrammarData, error) { - if msgs, ok := l.messages[lang]; ok { - return msgs, nil, nil - } - return nil, nil, fmt.Errorf("language not found: %s", lang) -} - -// Usage in tests -func TestMyFeature(t *testing.T) { - loader := &MockLoader{ - messages: map[string]map[string]i18n.Message{ - "en-GB": { - "test.greeting": {Text: "Hello"}, - "test.farewell": {Text: "Goodbye"}, - }, - }, - } - - svc, _ := i18n.NewWithLoader(loader) - i18n.SetDefault(svc) - - // Test your code - assert.Equal(t, "Hello", i18n.T("test.greeting")) -} -``` - -### Testing Missing Keys - -```go -func TestMissingKeys(t *testing.T) { - svc, _ := i18n.New(i18n.WithMode(i18n.ModeCollect)) - i18n.SetDefault(svc) - - var missing []string - i18n.OnMissingKey(func(m i18n.MissingKey) { - missing = append(missing, m.Key) - }) - - // Run your code that uses translations - runMyFeature() - - // Check for missing keys - assert.Empty(t, missing, "Found missing translation keys: %v", missing) -} -``` - -## Hot Reloading - -Implement a loader that watches for file changes: - -```go -type HotReloadLoader struct { - base *i18n.FSLoader - service *i18n.Service - watcher *fsnotify.Watcher -} - -func (l *HotReloadLoader) Watch() { - for { - select { - case event := <-l.watcher.Events: - if event.Op&fsnotify.Write == fsnotify.Write { - // Reload translations - l.service.LoadFS(os.DirFS("."), "locales") - } - } - } -} -``` - -## Performance Considerations - -1. **Cache translations**: The service caches all loaded messages -2. 
**Template caching**: Parsed templates are cached in `sync.Map` -3. **Handler chain**: Keep the chain short (the 6 default handlers are fine) -4. **Grammar cache**: Grammar lookups are cached per-language - -For high-throughput applications: -- Pre-warm the cache by calling common translations at startup -- Consider using `Raw()` to bypass the handler chain when not needed -- Profile with `go test -bench` if performance is critical diff --git a/docs/pkg/i18n/GRAMMAR.md deleted file mode 100644 index 2736f96..0000000 --- a/docs/pkg/i18n/GRAMMAR.md +++ /dev/null @@ -1,228 +0,0 @@ -# Grammar Engine - -The i18n grammar engine automatically handles verb conjugation, noun pluralisation, and article selection. It uses a combination of locale-defined rules and built-in English defaults. - -## Verb Conjugation - -### Past Tense - -```go -i18n.PastTense("delete") // "deleted" -i18n.PastTense("create") // "created" -i18n.PastTense("run") // "ran" (irregular) -i18n.PastTense("build") // "built" (irregular) -``` - -**Rules applied (in order):** - -1. Check locale JSON `gram.verb.{verb}.past` -2. Check built-in irregular verbs map -3. Apply regular conjugation rules: - - Ends in 'e' → add 'd' (delete → deleted) - - Ends in consonant + 'y' → change to 'ied' (try → tried) - - Short verb ending in CVC → double consonant (stop → stopped) - - Otherwise → add 'ed' (walk → walked) - -### Gerund (-ing form) - -```go -i18n.Gerund("build") // "building" -i18n.Gerund("run") // "running" -i18n.Gerund("make") // "making" -i18n.Gerund("die") // "dying" -``` - -**Rules applied:** - -1. Check locale JSON `gram.verb.{verb}.gerund` -2. Check built-in irregular verbs map -3. 
Apply regular rules: - - Ends in 'ie' → change to 'ying' (die → dying) - - Ends in 'e' (not 'ee') → drop 'e', add 'ing' (make → making) - - Short verb ending in CVC → double consonant (run → running) - - Otherwise → add 'ing' (build → building) - -## Noun Pluralisation - -```go -i18n.Pluralize("file", 1) // "file" -i18n.Pluralize("file", 5) // "files" -i18n.Pluralize("child", 2) // "children" (irregular) -i18n.Pluralize("analysis", 3) // "analyses" (Latin) -``` - -**Rules applied (in order):** - -1. Check locale JSON `gram.noun.{noun}.other` -2. Check built-in irregular nouns map -3. Apply regular rules: - - Ends in 's', 'x', 'z', 'ch', 'sh' → add 'es' - - Ends in consonant + 'y' → change to 'ies' - - Ends in 'f' or 'fe' → change to 'ves' (leaf → leaves) - - Otherwise → add 's' - -### Built-in Irregular Nouns - -| Singular | Plural | -|----------|--------| -| child | children | -| person | people | -| man | men | -| woman | women | -| foot | feet | -| tooth | teeth | -| mouse | mice | -| datum | data | -| index | indices | -| crisis | crises | -| fish | fish | -| sheep | sheep | - -## Articles - -```go -i18n.Article("apple") // "an apple" -i18n.Article("banana") // "a banana" -i18n.Article("hour") // "an hour" (silent h) -i18n.Article("user") // "a user" (y sound) -i18n.Article("umbrella") // "an umbrella" -``` - -**Rules:** - -1. Vowel sound words get "an" (a, e, i, o, u start) -2. Consonant sound words get "a" -3. Exception lists handle: - - Silent 'h' words: hour, honest, honour, heir, herb - - 'Y' sound words: user, union, unique, unit, universe - -## Composed Messages - -### Labels - -```go -i18n.Label("status") // "Status:" -i18n.Label("version") // "Version:" -``` - -Uses `gram.punct.label` suffix (default `:`) from locale. - -### Progress Messages - -```go -i18n.Progress("build") // "Building..." -i18n.ProgressSubject("check", "config") // "Checking config..." -``` - -Uses `gram.punct.progress` suffix (default `...`) from locale. 
- -### Action Results - -```go -i18n.ActionResult("delete", "file") // "File deleted" -i18n.ActionResult("create", "project") // "Project created" -``` - -Pattern: `{Title(subject)} {past(verb)}` - -### Action Failures - -```go -i18n.ActionFailed("delete", "file") // "Failed to delete file" -i18n.ActionFailed("save", "config") // "Failed to save config" -``` - -Pattern: `Failed to {verb} {subject}` - -## Locale Configuration - -Define grammar in your locale JSON: - -```json -{ - "gram": { - "verb": { - "deploy": { - "past": "deployed", - "gerund": "deploying" - }, - "sync": { - "past": "synced", - "gerund": "syncing" - } - }, - "noun": { - "repository": { - "one": "repository", - "other": "repositories" - }, - "schema": { - "one": "schema", - "other": "schemata" - } - }, - "article": { - "indefinite": { - "default": "a", - "vowel": "an" - }, - "definite": "the" - }, - "punct": { - "label": ":", - "progress": "..." - }, - "word": { - "status": "status", - "version": "version" - } - } -} -``` - -## Template Functions - -Use grammar functions in templates: - -```go -template.New("").Funcs(i18n.TemplateFuncs()) -``` - -| Function | Example | Result | -|----------|---------|--------| -| `past` | `{{past "delete"}}` | "deleted" | -| `gerund` | `{{gerund "build"}}` | "building" | -| `plural` | `{{plural "file" 5}}` | "files" | -| `article` | `{{article "apple"}}` | "an apple" | -| `title` | `{{title "hello world"}}` | "Hello World" | -| `lower` | `{{lower "HELLO"}}` | "hello" | -| `upper` | `{{upper "hello"}}` | "HELLO" | -| `quote` | `{{quote "text"}}` | `"text"` | - -## Language-Specific Grammar - -The grammar engine loads language-specific data when available: - -```go -// Get grammar data for a language -data := i18n.GetGrammarData("de-DE") -if data != nil { - // Access verb forms, noun forms, etc. 
-} - -// Set grammar data programmatically -i18n.SetGrammarData("de-DE", &i18n.GrammarData{ - Verbs: map[string]i18n.VerbForms{ - "machen": {Past: "gemacht", Gerund: "machend"}, - }, -}) -``` - -## Performance - -Grammar results are computed on-demand but templates are cached: - -- First call: Parse template + apply grammar -- Subsequent calls: Reuse cached template - -The template cache uses `sync.Map` for thread-safe concurrent access. diff --git a/docs/pkg/i18n/README.md b/docs/pkg/i18n/README.md deleted file mode 100644 index a70dfc3..0000000 --- a/docs/pkg/i18n/README.md +++ /dev/null @@ -1,420 +0,0 @@ -# i18n Package - -The `pkg/i18n` package provides internationalisation and localisation for Go CLI applications. It features a grammar engine for automatic verb conjugation and noun pluralisation, CLDR plural support, and an extensible handler chain for dynamic key patterns. - -## Quick Start - -```go -import "github.com/host-uk/core/pkg/i18n" - -func main() { - // Initialise with embedded locales - svc, err := i18n.New() - if err != nil { - log.Fatal(err) - } - i18n.SetDefault(svc) - - // Translate messages - fmt.Println(i18n.T("cli.success")) // "Operation completed" - fmt.Println(i18n.T("i18n.count.file", 5)) // "5 files" - fmt.Println(i18n.T("i18n.progress.build")) // "Building..." 
- fmt.Println(i18n.T("i18n.done.delete", "config.yaml")) // "Config.yaml deleted" -} -``` - -## Table of Contents - -- [Basic Translation](#basic-translation) -- [Template Variables](#template-variables) -- [Pluralisation](#pluralisation) -- [Magic Namespaces](#magic-namespaces) -- [Subjects](#subjects) -- [Grammar Engine](#grammar-engine) -- [Formality](#formality) -- [Modes](#modes) -- [Custom Loaders](#custom-loaders) -- [Custom Handlers](#custom-handlers) -- [Locale File Format](#locale-file-format) - -## Basic Translation - -The `T()` function translates message keys: - -```go -// Simple translation -msg := i18n.T("cli.success") - -// With template variables -msg := i18n.T("error.not_found", map[string]any{ - "Name": "config.yaml", -}) -``` - -Use `Raw()` to bypass magic namespace handling: - -```go -// T() handles i18n.* magic -i18n.T("i18n.label.status") // "Status:" - -// Raw() does direct lookup only -i18n.Raw("i18n.label.status") // Returns key as-is (not in JSON) -``` - -## Template Variables - -Translation strings support Go templates: - -```json -{ - "greeting": "Hello, {{.Name}}!", - "summary": "Found {{.Count}} {{if eq .Count 1}}item{{else}}items{{end}}" -} -``` - -```go -i18n.T("greeting", map[string]any{"Name": "World"}) // "Hello, World!" 
-i18n.T("summary", map[string]any{"Count": 3}) // "Found 3 items" -``` - -### Available Template Functions - -| Function | Description | Example | -|----------|-------------|---------| -| `title` | Title case | `{{title .Name}}` | -| `lower` | Lowercase | `{{lower .Name}}` | -| `upper` | Uppercase | `{{upper .Name}}` | -| `past` | Past tense | `{{past "delete"}}` → "deleted" | -| `gerund` | -ing form | `{{gerund "build"}}` → "building" | -| `plural` | Pluralise | `{{plural "file" .Count}}` | -| `article` | Add article | `{{article "apple"}}` → "an apple" | -| `quote` | Add quotes | `{{quote .Name}}` → `"name"` | - -## Pluralisation - -The package supports full CLDR plural categories: - -```json -{ - "item_count": { - "zero": "No items", - "one": "{{.Count}} item", - "two": "{{.Count}} items", - "few": "{{.Count}} items", - "many": "{{.Count}} items", - "other": "{{.Count}} items" - } -} -``` - -```go -i18n.T("item_count", map[string]any{"Count": 0}) // "No items" (if zero defined) -i18n.T("item_count", map[string]any{"Count": 1}) // "1 item" -i18n.T("item_count", map[string]any{"Count": 5}) // "5 items" -``` - -For simple cases, use `i18n.count.*`: - -```go -i18n.T("i18n.count.file", 1) // "1 file" -i18n.T("i18n.count.file", 5) // "5 files" -``` - -## Magic Namespaces - -The `i18n.*` namespace provides automatic message composition: - -### Labels (`i18n.label.*`) - -```go -i18n.T("i18n.label.status") // "Status:" -i18n.T("i18n.label.version") // "Version:" -``` - -### Progress (`i18n.progress.*`) - -```go -i18n.T("i18n.progress.build") // "Building..." -i18n.T("i18n.progress.check", "config") // "Checking config..." 
-``` - -### Counts (`i18n.count.*`) - -```go -i18n.T("i18n.count.file", 1) // "1 file" -i18n.T("i18n.count.file", 5) // "5 files" -i18n.T("i18n.count.repo", 10) // "10 repos" -``` - -### Done (`i18n.done.*`) - -```go -i18n.T("i18n.done.delete", "file") // "File deleted" -i18n.T("i18n.done.create", "project") // "Project created" -``` - -### Fail (`i18n.fail.*`) - -```go -i18n.T("i18n.fail.delete", "file") // "Failed to delete file" -i18n.T("i18n.fail.save", "config") // "Failed to save config" -``` - -### Numeric (`i18n.numeric.*`) - -```go -i18n.N("number", 1234567) // "1,234,567" -i18n.N("percent", 0.85) // "85%" -i18n.N("bytes", 1536000) // "1.46 MB" -i18n.N("ordinal", 1) // "1st" -``` - -## Subjects - -Subjects provide typed context for translations: - -```go -// Create a subject -subj := i18n.S("file", "config.yaml") - -// Chain methods for additional context -subj := i18n.S("file", files). - Count(len(files)). - In("workspace"). - Formal() - -// Use in translations -i18n.T("i18n.done.delete", subj.String()) -``` - -### Subject Methods - -| Method | Description | -|--------|-------------| -| `Count(n)` | Set count for pluralisation | -| `Gender(g)` | Set grammatical gender | -| `In(loc)` | Set location context | -| `Formal()` | Set formal address | -| `Informal()` | Set informal address | - -## Grammar Engine - -The grammar engine handles verb conjugation and noun forms: - -```go -// Verb conjugation -i18n.PastTense("delete") // "deleted" -i18n.PastTense("run") // "ran" (irregular) -i18n.Gerund("build") // "building" -i18n.Gerund("run") // "running" - -// Noun pluralisation -i18n.Pluralize("file", 1) // "file" -i18n.Pluralize("file", 5) // "files" -i18n.Pluralize("child", 2) // "children" (irregular) - -// Articles -i18n.Article("apple") // "an apple" -i18n.Article("banana") // "a banana" - -// Composed messages -i18n.Label("status") // "Status:" -i18n.Progress("build") // "Building..." -i18n.ProgressSubject("check", "cfg") // "Checking cfg..." 
-i18n.ActionResult("delete", "file") // "File deleted" -i18n.ActionFailed("save", "config") // "Failed to save config" -``` - -### Customising Grammar - -Add irregular forms in your locale JSON: - -```json -{ - "gram": { - "verb": { - "deploy": { "past": "deployed", "gerund": "deploying" } - }, - "noun": { - "repository": { "one": "repository", "other": "repositories" } - }, - "punct": { - "label": ":", - "progress": "..." - } - } -} -``` - -## Formality - -For languages with formal/informal address (German Sie/du, French vous/tu): - -```go -// Set service-wide formality -svc.SetFormality(i18n.FormalityFormal) - -// Per-translation formality via Subject -i18n.T("greeting", i18n.S("user", name).Formal()) -i18n.T("greeting", i18n.S("user", name).Informal()) - -// Per-translation via TranslationContext -i18n.T("greeting", i18n.C("customer support").Formal()) -``` - -Define formality variants in JSON: - -```json -{ - "greeting": "Hello", - "greeting._formal": "Good morning, sir", - "greeting._informal": "Hey there" -} -``` - -## Modes - -Three modes control missing key behaviour: - -```go -// Normal (default): Returns key as-is -i18n.SetMode(i18n.ModeNormal) -i18n.T("missing.key") // "missing.key" - -// Strict: Panics on missing keys (dev/CI) -i18n.SetMode(i18n.ModeStrict) -i18n.T("missing.key") // panic! 
- -// Collect: Dispatches to handler (QA testing) -i18n.SetMode(i18n.ModeCollect) -i18n.OnMissingKey(func(m i18n.MissingKey) { - log.Printf("MISSING: %s at %s:%d", m.Key, m.CallerFile, m.CallerLine) -}) -``` - -## Custom Loaders - -Implement the `Loader` interface for custom storage: - -```go -type Loader interface { - Load(lang string) (map[string]Message, *GrammarData, error) - Languages() []string -} -``` - -Example database loader: - -```go -type DBLoader struct { - db *sql.DB -} - -func (l *DBLoader) Languages() []string { - // Query available languages from database -} - -func (l *DBLoader) Load(lang string) (map[string]i18n.Message, *i18n.GrammarData, error) { - // Load translations from database -} - -// Use custom loader -svc, err := i18n.NewWithLoader(&DBLoader{db: db}) -``` - -## Custom Handlers - -Add custom key handlers for dynamic patterns: - -```go -type MyHandler struct{} - -func (h MyHandler) Match(key string) bool { - return strings.HasPrefix(key, "my.prefix.") -} - -func (h MyHandler) Handle(key string, args []any, next func() string) string { - // Handle the key or call next() to delegate - return "custom result" -} - -// Add to handler chain -svc.AddHandler(MyHandler{}) // Append (lower priority) -svc.PrependHandler(MyHandler{}) // Prepend (higher priority) -``` - -## Locale File Format - -Locale files use nested JSON with dot-notation access: - -```json -{ - "cli": { - "success": "Operation completed", - "error": { - "not_found": "{{.Name}} not found" - } - }, - "cmd": { - "build": { - "short": "Build the project", - "long": "Build compiles source files into an executable" - } - }, - "gram": { - "verb": { - "build": { "past": "built", "gerund": "building" } - }, - "noun": { - "file": { "one": "file", "other": "files" } - }, - "punct": { - "label": ":", - "progress": "..." 
- } - } -} -``` - -Access keys with dot notation: - -```go -i18n.T("cli.success") // "Operation completed" -i18n.T("cli.error.not_found") // "{{.Name}} not found" -i18n.T("cmd.build.short") // "Build the project" -``` - -## Configuration Options - -Use functional options when creating a service: - -```go -svc, err := i18n.New( - i18n.WithFallback("de-DE"), // Fallback language - i18n.WithFormality(i18n.FormalityFormal), // Default formality - i18n.WithMode(i18n.ModeStrict), // Missing key mode - i18n.WithDebug(true), // Show [key] prefix -) -``` - -## Thread Safety - -The package is fully thread-safe: - -- `Service` uses `sync.RWMutex` for state -- Global `Default()` uses `atomic.Pointer` -- `OnMissingKey` uses `atomic.Value` -- `FSLoader.Languages()` uses `sync.Once` - -Safe for concurrent use from multiple goroutines. - -## Debug Mode - -Enable debug mode to see translation keys: - -```go -i18n.SetDebug(true) -i18n.T("cli.success") // "[cli.success] Operation completed" -``` - -Useful for identifying which keys are used where. diff --git a/docs/pkg/log.md b/docs/pkg/log.md new file mode 100644 index 0000000..15e9db1 --- /dev/null +++ b/docs/pkg/log.md @@ -0,0 +1,83 @@ +# Logging Reference + +Logging lives in the root `core` package in this repository. There is no separate `pkg/log` import path here. 
+ +## Create a Logger + +```go +logger := core.NewLog(core.LogOptions{ + Level: core.LevelInfo, +}) +``` + +## Levels + +| Level | Meaning | +|-------|---------| +| `LevelQuiet` | no output | +| `LevelError` | errors and security events | +| `LevelWarn` | warnings, errors, security events | +| `LevelInfo` | informational, warnings, errors, security events | +| `LevelDebug` | everything | + +## Log Methods + +```go +logger.Debug("workspace discovered", "path", "/srv/workspaces") +logger.Info("service started", "service", "audit") +logger.Warn("retrying fetch", "attempt", 2) +logger.Error("fetch failed", "err", err) +logger.Security("sandbox escape detected", "path", attemptedPath) +``` + +## Default Logger + +The package owns a default logger. + +```go +core.SetLevel(core.LevelDebug) +core.SetRedactKeys("token", "password") + +core.Info("service started", "service", "audit") +``` + +## Redaction + +Values for keys listed in `RedactKeys` are replaced with `[REDACTED]`. + +```go +logger.SetRedactKeys("token") +logger.Info("login", "user", "cladius", "token", "secret-value") +``` + +## Output and Rotation + +```go +logger := core.NewLog(core.LogOptions{ + Level: core.LevelInfo, + Output: os.Stderr, +}) +``` + +If you provide `Rotation` and set `RotationWriterFactory`, the logger writes to the rotating writer instead of the plain output stream. + +## Error-Aware Logging + +`LogErr` extracts structured error context before logging: + +```go +le := core.NewLogErr(logger) +le.Log(err) +``` + +`ErrorLog` is the log-and-return wrapper exposed through `c.Log()`. + +## Panic-Aware Logging + +`LogPanic` is the lightweight panic logger: + +```go +defer core.NewLogPanic(logger).Recover() +``` + +It logs the recovered panic but does not manage crash files. For crash reports, use `c.Error().Recover()`. 
diff --git a/docs/plans/2026-01-30-cli-commands-design.md b/docs/plans/2026-01-30-cli-commands-design.md deleted file mode 100644 index 73212af..0000000 --- a/docs/plans/2026-01-30-cli-commands-design.md +++ /dev/null @@ -1,185 +0,0 @@ -# CLI Commands Registration Design - -## Overview - -Move CLI commands from `cmd/` into self-registering packages in `pkg/`. This enables build variants with reduced attack surface - only compiled code exists in the binary. - -## Pattern - -Same pattern as `i18n.RegisterLocales()`: -- Packages register themselves during `init()` -- Registration is stored until `cli.Init()` runs -- Build tags control which packages are imported - -## Registration API - -```go -// pkg/cli/commands.go - -type CommandRegistration func(root *cobra.Command) - -var ( - registeredCommands []CommandRegistration - registeredCommandsMu sync.Mutex -) - -// RegisterCommands registers a function that adds commands to the CLI. -func RegisterCommands(fn CommandRegistration) { - registeredCommandsMu.Lock() - defer registeredCommandsMu.Unlock() - registeredCommands = append(registeredCommands, fn) -} - -func attachRegisteredCommands(root *cobra.Command) { - registeredCommandsMu.Lock() - defer registeredCommandsMu.Unlock() - for _, fn := range registeredCommands { - fn(root) - } -} -``` - -## Integration with Core.App - -The CLI stores `rootCmd` in `core.App`, unifying GUI and CLI under the same pattern: - -```go -// pkg/cli/runtime.go - -func Init(opts Options) error { - once.Do(func() { - rootCmd := &cobra.Command{ - Use: opts.AppName, - Version: opts.Version, - } - - attachRegisteredCommands(rootCmd) - - c, err := framework.New( - framework.WithApp(rootCmd), - // ... services ... - ) - // ... 
- }) - return initErr -} - -func RootCmd() *cobra.Command { - return framework.App().(*cobra.Command) -} - -func Execute() error { - return RootCmd().Execute() -} -``` - -## Package Structure - -Commands move from `cmd/` to `pkg/` with a `cmd.go` file: - -``` -pkg/ -├── php/ -│ ├── i18n.go # registers locales -│ ├── cmd.go # registers commands -│ ├── locales/ -│ └── ... -├── dev/ -│ ├── cmd.go # registers commands -│ └── ... -├── cli/ -│ ├── commands.go # RegisterCommands API -│ ├── runtime.go # Init, Execute -│ └── ... -``` - -Each `cmd.go`: - -```go -// pkg/php/cmd.go -package php - -import "github.com/host-uk/core/pkg/cli" - -func init() { - cli.RegisterCommands(AddCommands) -} - -func AddCommands(root *cobra.Command) { - // ... existing command setup ... -} -``` - -## Build Variants - -Import files with build tags in `cmd/variants/`: - -``` -cmd/ -├── main.go -└── variants/ - ├── full.go # default: all packages - ├── ci.go # CI/release only - ├── php.go # PHP tooling only - └── minimal.go # core only -``` - -```go -// cmd/variants/full.go -//go:build !ci && !php && !minimal - -package variants - -import ( - _ "github.com/host-uk/core/pkg/ai" - _ "github.com/host-uk/core/pkg/build" - _ "github.com/host-uk/core/pkg/ci" - _ "github.com/host-uk/core/pkg/dev" - _ "github.com/host-uk/core/pkg/docs" - _ "github.com/host-uk/core/pkg/doctor" - _ "github.com/host-uk/core/pkg/go" - _ "github.com/host-uk/core/pkg/php" - _ "github.com/host-uk/core/pkg/pkg" - _ "github.com/host-uk/core/pkg/sdk" - _ "github.com/host-uk/core/pkg/setup" - _ "github.com/host-uk/core/pkg/test" - _ "github.com/host-uk/core/pkg/vm" -) -``` - -```go -// cmd/variants/ci.go -//go:build ci - -package variants - -import ( - _ "github.com/host-uk/core/pkg/build" - _ "github.com/host-uk/core/pkg/ci" - _ "github.com/host-uk/core/pkg/doctor" - _ "github.com/host-uk/core/pkg/sdk" -) -``` - -## Build Commands - -- `go build` → full variant (default) -- `go build -tags ci` → CI variant -- `go build -tags php` 
→ PHP-only variant - -## Benefits - -1. **Smaller attack surface** - only compiled code exists in binary -2. **Self-registering packages** - same pattern as `i18n.RegisterLocales()` -3. **Uses existing `core.App`** - no new framework concepts -4. **Simple build variants** - just add `-tags` flag -5. **Defence in depth** - no code = no vulnerabilities - -## Migration Steps - -1. Add `RegisterCommands()` to `pkg/cli/commands.go` -2. Update `pkg/cli/runtime.go` to use `core.App` for rootCmd -3. Move each `cmd/*` package to `pkg/*/cmd.go` -4. Create `cmd/variants/` with build tag files -5. Simplify `cmd/main.go` to minimal entry point -6. Remove old `cmd/core_dev.go` and `cmd/core_ci.go` diff --git a/docs/plans/2026-01-30-core-ipc-design.md b/docs/plans/2026-01-30-core-ipc-design.md deleted file mode 100644 index ec3c9c3..0000000 --- a/docs/plans/2026-01-30-core-ipc-design.md +++ /dev/null @@ -1,373 +0,0 @@ -# Core Framework IPC Design - -> Design document for refactoring CLI commands to use the Core framework's IPC system. - -## Overview - -The Core framework provides a dependency injection and inter-process communication (IPC) system originally designed for orchestrating services. This design extends the framework with request/response patterns and applies it to CLI commands. - -Commands build "worker bundles" - sandboxed Core instances with specific services. The bundle configuration acts as a permissions layer: if a service isn't registered, that capability isn't available. 
- -## Dispatch Patterns - -Four patterns for service communication: - -| Method | Behaviour | Returns | Use Case | -|--------|-----------|---------|----------| -| `ACTION` | Broadcast to all handlers | `error` | Events, notifications | -| `QUERY` | First responder wins | `(any, bool, error)` | Get data | -| `QUERYALL` | Broadcast, collect all | `([]any, error)` | Aggregate from multiple services | -| `PERFORM` | First responder executes | `(any, bool, error)` | Execute a task with side effects | - -### ACTION (existing) - -Fire-and-forget broadcast. All registered handlers receive the message. Errors are aggregated. - -```go -c.ACTION(ActionServiceStartup{}) -``` - -### QUERY (new) - -Request data from services. Stops at the first handler that returns `handled=true`. - -```go -result, handled, err := c.QUERY(git.QueryStatus{Paths: paths}) -if !handled { - // No service registered to handle this query -} -statuses := result.([]git.RepoStatus) -``` - -### QUERYALL (new) - -Broadcast query to all handlers, collect all responses. Useful for aggregating results from multiple services (e.g., multiple QA/lint tools). - -```go -results, err := c.QUERYALL(qa.QueryLint{Paths: paths}) -for _, r := range results { - lint := r.(qa.LintResult) - fmt.Printf("%s found %d issues\n", lint.Tool, len(lint.Issues)) -} -``` - -### PERFORM (new) - -Execute a task with side effects. Stops at the first handler that returns `handled=true`. 
- -```go -result, handled, err := c.PERFORM(agentic.TaskCommit{ - Path: repo.Path, - Name: repo.Name, -}) -if !handled { - // Agentic service not in bundle - commits not available -} -``` - -## Architecture - -``` -┌─────────────────────────────────────────────────────────────┐ -│ cmd/dev/dev_work.go │ -│ - Builds worker bundle │ -│ - Triggers PERFORM(TaskWork{}) │ -└─────────────────────┬───────────────────────────────────────┘ - │ -┌─────────────────────▼───────────────────────────────────────┐ -│ cmd/dev/bundles.go │ -│ - NewWorkBundle() - git + agentic + dev │ -│ - NewStatusBundle() - git + dev only │ -│ - Bundle config = permissions │ -└─────────────────────┬───────────────────────────────────────┘ - │ -┌─────────────────────▼───────────────────────────────────────┐ -│ pkg/dev/service.go │ -│ - Orchestrates workflow │ -│ - QUERY(git.QueryStatus{}) │ -│ - PERFORM(agentic.TaskCommit{}) │ -│ - PERFORM(git.TaskPush{}) │ -└─────────────────────┬───────────────────────────────────────┘ - │ - ┌─────────────┴─────────────┐ - ▼ ▼ -┌───────────────────┐ ┌───────────────────┐ -│ pkg/git/service │ │ pkg/agentic/svc │ -│ │ │ │ -│ Queries: │ │ Tasks: │ -│ - QueryStatus │ │ - TaskCommit │ -│ - QueryDirtyRepos │ │ - TaskPrompt │ -│ - QueryAheadRepos │ │ │ -│ │ │ │ -│ Tasks: │ │ │ -│ - TaskPush │ │ │ -│ - TaskPull │ │ │ -└───────────────────┘ └───────────────────┘ -``` - -## Permissions Model - -Permissions are implicit through bundle configuration: - -```go -// Full capabilities - can commit and push -func NewWorkBundle(opts WorkBundleOptions) (*framework.Runtime, error) { - return framework.NewWithFactories(nil, map[string]framework.ServiceFactory{ - "dev": func() (any, error) { return dev.NewService(opts.Dev)(nil) }, - "git": func() (any, error) { return git.NewService(opts.Git)(nil) }, - "agentic": func() (any, error) { return agentic.NewService(opts.Agentic)(nil) }, - }) -} - -// Read-only - status queries only, no commits -func NewStatusBundle(opts StatusBundleOptions) 
(*framework.Runtime, error) { - return framework.NewWithFactories(nil, map[string]framework.ServiceFactory{ - "dev": func() (any, error) { return dev.NewService(opts.Dev)(nil) }, - "git": func() (any, error) { return git.NewService(opts.Git)(nil) }, - // No agentic service - TaskCommit will be unhandled - }) -} -``` - -Service options provide fine-grained control: - -```go -agentic.NewService(agentic.ServiceOptions{ - AllowEdit: false, // Claude can only use read-only tools -}) - -agentic.NewService(agentic.ServiceOptions{ - AllowEdit: true, // Claude can use Write/Edit tools -}) -``` - -**Key principle**: Code never checks permissions explicitly. It dispatches actions and either they're handled or they're not. The bundle configuration is the single source of truth for what's allowed. - -## Framework Changes - -### New Types (interfaces.go) - -```go -type Query interface{} -type Task interface{} - -type QueryHandler func(*Core, Query) (any, bool, error) -type TaskHandler func(*Core, Task) (any, bool, error) -``` - -### Core Struct Additions (interfaces.go) - -```go -type Core struct { - // ... 
existing fields - - queryMu sync.RWMutex - queryHandlers []QueryHandler - - taskMu sync.RWMutex - taskHandlers []TaskHandler -} -``` - -### New Methods (core.go) - -```go -// QUERY - first responder wins -func (c *Core) QUERY(q Query) (any, bool, error) - -// QUERYALL - broadcast, collect all responses -func (c *Core) QUERYALL(q Query) ([]any, error) - -// PERFORM - first responder executes -func (c *Core) PERFORM(t Task) (any, bool, error) - -// Registration -func (c *Core) RegisterQuery(h QueryHandler) -func (c *Core) RegisterTask(h TaskHandler) -``` - -### Re-exports (framework.go) - -```go -type Query = core.Query -type Task = core.Task -type QueryHandler = core.QueryHandler -type TaskHandler = core.TaskHandler -``` - -## Service Implementation Pattern - -Services register handlers during startup: - -```go -func (s *Service) OnStartup(ctx context.Context) error { - s.Core().RegisterAction(s.handleAction) - s.Core().RegisterQuery(s.handleQuery) - s.Core().RegisterTask(s.handleTask) - return nil -} - -func (s *Service) handleQuery(c *framework.Core, q framework.Query) (any, bool, error) { - switch m := q.(type) { - case QueryStatus: - result := s.getStatus(m.Paths, m.Names) - return result, true, nil - case QueryDirtyRepos: - return s.DirtyRepos(), true, nil - } - return nil, false, nil // Not handled -} - -func (s *Service) handleTask(c *framework.Core, t framework.Task) (any, bool, error) { - switch m := t.(type) { - case TaskPush: - err := s.push(m.Path) - return nil, true, err - case TaskPull: - err := s.pull(m.Path) - return nil, true, err - } - return nil, false, nil // Not handled -} -``` - -## Git Service Queries & Tasks - -```go -// pkg/git/queries.go -type QueryStatus struct { - Paths []string - Names map[string]string -} - -type QueryDirtyRepos struct{} -type QueryAheadRepos struct{} - -// pkg/git/tasks.go -type TaskPush struct { - Path string - Name string -} - -type TaskPull struct { - Path string - Name string -} - -type TaskPushMultiple struct { - 
Paths []string - Names map[string]string -} -``` - -## Agentic Service Tasks - -```go -// pkg/agentic/tasks.go -type TaskCommit struct { - Path string - Name string - CanEdit bool -} - -type TaskPrompt struct { - Prompt string - WorkDir string - AllowedTools []string -} -``` - -## Dev Workflow Service - -```go -// pkg/dev/tasks.go -type TaskWork struct { - RegistryPath string - StatusOnly bool - AutoCommit bool -} - -type TaskCommitAll struct { - RegistryPath string -} - -type TaskPushAll struct { - RegistryPath string - Force bool -} -``` - -## Command Simplification - -Before (dev_work.go - 327 lines of orchestration): - -```go -func runWork(registryPath string, statusOnly, autoCommit bool) error { - // Load registry - // Get git status - // Display table - // Loop dirty repos, shell out to claude - // Re-check status - // Confirm push - // Push repos - // Handle diverged branches - // ... -} -``` - -After (dev_work.go - minimal): - -```go -func runWork(registryPath string, statusOnly, autoCommit bool) error { - bundle, err := NewWorkBundle(WorkBundleOptions{ - RegistryPath: registryPath, - }) - if err != nil { - return err - } - - ctx := context.Background() - bundle.Core.ServiceStartup(ctx, nil) - defer bundle.Core.ServiceShutdown(ctx) - - _, _, err = bundle.Core.PERFORM(dev.TaskWork{ - StatusOnly: statusOnly, - AutoCommit: autoCommit, - }) - return err -} -``` - -All orchestration logic moves to `pkg/dev/service.go` where it can be tested independently and reused. - -## Implementation Tasks - -1. **Framework Core** - Add Query, Task types and QUERY/QUERYALL/PERFORM methods -2. **Framework Re-exports** - Update framework.go with new types -3. **Git Service** - Add query and task handlers -4. **Agentic Service** - Add task handlers -5. **Dev Service** - Create workflow orchestration service -6. **Bundles** - Create bundle factories in cmd/dev/ -7. 
**Commands** - Simplify cmd/dev/*.go to use bundles - -## Future: CLI-Wide Runtime - -Phase 2 will add a CLI-wide Core instance that: - -- Handles signals (SIGINT, SIGTERM) -- Manages UI state -- Spawns worker bundles as "interactable elements" -- Provides cross-bundle communication - -Worker bundles become sandboxed children of the CLI runtime, with the runtime controlling what capabilities each bundle receives. - -## Testing - -Each layer is independently testable: - -- **Framework**: Unit tests for QUERY/QUERYALL/PERFORM dispatch -- **Services**: Unit tests with mock Core instances -- **Bundles**: Integration tests with real services -- **Commands**: E2E tests via CLI invocation - -The permission model is testable by creating bundles with/without specific services and verifying behaviour. diff --git a/docs/plans/2026-01-30-i18n-v2-design.md b/docs/plans/2026-01-30-i18n-v2-design.md deleted file mode 100644 index c5a4cb8..0000000 --- a/docs/plans/2026-01-30-i18n-v2-design.md +++ /dev/null @@ -1,134 +0,0 @@ -# i18n Package Refactor Design - -## Goal -Refactor pkg/i18n to be extensible without breaking changes in future. Based on Gemini review recommendations. - -## File Structure - -### Renamed/Merged -| Current | New | Reason | -|---------|-----|--------| -| `interfaces.go` | `types.go` | Contains types, not interfaces | -| `mutate.go` | `loader.go` | Loads/flattens JSON | -| `actions.go` | `hooks.go` | Missing key callbacks | -| `checks.go` | (merge into loader.go) | Loading helpers | -| `mode.go` | (merge into types.go) | Just one type | - -### New Files -| File | Purpose | -|------|---------| -| `handler.go` | KeyHandler interface + built-in handlers | -| `context.go` | TranslationContext + C() helper | - -### Unchanged -`grammar.go`, `language.go`, `localise.go`, `debug.go`, `numbers.go`, `time.go`, `i18n.go`, `intents.go`, `compose.go`, `transform.go` - -## Interfaces - -### KeyHandler -```go -// KeyHandler processes translation keys before standard lookup. 
-type KeyHandler interface { - Match(key string) bool - Handle(key string, args []any, next func() string) string -} -``` - -Built-in handlers: -- `LabelHandler` - `i18n.label.*` → "Status:" -- `ProgressHandler` - `i18n.progress.*` → "Building..." -- `CountHandler` - `i18n.count.*` → "5 files" -- `NumericHandler` - `i18n.numeric.*` → formatted numbers -- `DoneHandler` - `i18n.done.*` → "File deleted" -- `FailHandler` - `i18n.fail.*` → "Failed to delete file" - -### Loader -```go -// Loader provides translation data to the Service. -type Loader interface { - Load(lang string) (map[string]Message, *GrammarData, error) - Languages() []string -} -``` - -Built-in: `FSLoader` for embedded/filesystem JSON. - -### TranslationContext -```go -type TranslationContext struct { - Context string - Gender string - Formality Formality - Extra map[string]any -} - -func C(context string) *TranslationContext -``` - -## Service Changes - -```go -type Service struct { - loader Loader - messages map[string]map[string]Message - grammar map[string]*GrammarData - currentLang string - fallbackLang string - formality Formality - mode Mode - debug bool - handlers []KeyHandler - mu sync.RWMutex -} -``` - -### Constructors -```go -func New() (*Service, error) -func NewWithLoader(loader Loader, opts ...Option) (*Service, error) - -type Option func(*Service) -func WithDefaultHandlers() Option -func WithFallback(lang string) Option -func WithFormality(f Formality) Option -``` - -### T() Flow -1. Parse args → extract Context, Subject, data -2. Run handler chain (each can handle or call next) -3. 
Standard lookup with context suffix fallback - -## Public API - -### Keep -- `T(key, args...)`, `Raw(key, args...)` -- `S(noun, value)` - Subject builder -- `SetLanguage()`, `CurrentLanguage()`, `SetMode()`, `CurrentMode()` -- `SetFormality()`, `SetDebug()`, `Direction()`, `IsRTL()` -- Grammar: `PastTense()`, `Gerund()`, `Pluralize()`, `Article()`, `Title()`, `Label()`, `Progress()` - -### Add -- `C(context)` - Context builder -- `NewWithLoader()` - Custom loader support -- `AddHandler()`, `PrependHandler()` - Custom handlers - -### Remove (No Aliases) -- `NewSubject()` - use `S()` -- `N()` - use `T("i18n.numeric.*")` - -## Breaking Changes -- Constructor signature changes -- Internal file reorganisation -- No backwards compatibility layer - -## Implementation Order -1. Create new files (types.go, handler.go, loader.go, context.go, hooks.go) -2. Move types from interfaces.go → types.go -3. Implement Loader interface + FSLoader -4. Implement KeyHandler interface + built-in handlers -5. Implement TranslationContext -6. Update Service struct + constructors -7. Update T() to use handler chain -8. Update package-level functions in i18n.go -9. Delete old files -10. Update tests diff --git a/docs/plans/2026-01-30-semantic-i18n-design.md b/docs/plans/2026-01-30-semantic-i18n-design.md deleted file mode 100644 index ca28e9d..0000000 --- a/docs/plans/2026-01-30-semantic-i18n-design.md +++ /dev/null @@ -1,486 +0,0 @@ -# Semantic i18n System Design - -## Overview - -Extend the i18n system beyond simple key-value translation to support **semantic intents** that encode meaning, enabling: - -- Composite translations from reusable fragments -- Grammatical awareness (gender, plurality, formality) -- CLI prompt integration with localized options -- Reduced calling code complexity - -## Goals - -1. **Simple cases stay simple** - `_("key")` works as expected -2. **Complex cases become declarative** - Intent drives output, not caller logic -3. 
**Translators have power** - Grammar rules live in translations, not code -4. **CLI integration** - Questions, confirmations, choices are first-class - -## API Design - -### Function Reference (Stable API) - -These function names are **permanent** - choose carefully, they cannot change. - -| Function | Alias | Purpose | -|----------|-------|---------| -| `_()` | - | Simple gettext-style lookup | -| `T()` | `C()` | Compose - semantic intent resolution | -| `S()` | `Subject()` | Create typed subject with metadata | - -### Simple Translation: `_()` - -Standard gettext-style lookup. No magic, just key → value. - -```go -i18n._("cli.success") // "Success" -i18n._("common.label.error") // "Error:" -i18n._("common.error.failed", map[string]any{"Action": "load"}) // "Failed to load" -``` - -### Compose: `T()` / `C()` - -Semantic intent resolution. Takes an intent key from `core.*` namespace and returns a `Composed` result with multiple output forms. - -```go -// Full form -result := i18n.T("core.delete", i18n.S("file", path)) -result := i18n.C("core.delete", i18n.S("file", path)) // Alias - -// Result contains all forms -result.Question // "Delete /path/to/file.txt?" -result.Confirm // "Really delete /path/to/file.txt?" -result.Success // "File deleted" -result.Failure // "Failed to delete file" -result.Meta // IntentMeta{Dangerous: true, Default: "no", ...} -``` - -### Subject: `S()` / `Subject()` - -Creates a typed subject with optional metadata for grammar rules. 
- -```go -// Simple -i18n.S("file", "/path/to/file.txt") - -// With count (plurality) -i18n.S("commit", commits).Count(len(commits)) - -// With gender (for gendered languages) -i18n.S("user", name).Gender("female") - -// Chained -i18n.S("file", path).Count(3).In("/project") -``` - -### Type Signatures - -```go -// Simple lookup -func _(key string, args ...any) string - -// Compose (T and C are aliases) -func T(intent string, subject *Subject) *Composed -func C(intent string, subject *Subject) *Composed - -// Subject builder -func S(noun string, value any) *Subject -func Subject(noun string, value any) *Subject - -// Composed result -type Composed struct { - Question string - Confirm string - Success string - Failure string - Meta IntentMeta -} - -// Subject with metadata -type Subject struct { - Noun string - Value any - count int - gender string - // ... other metadata -} - -func (s *Subject) Count(n int) *Subject -func (s *Subject) Gender(g string) *Subject -func (s *Subject) In(location string) *Subject - -// Intent metadata -type IntentMeta struct { - Type string // "action", "question", "info" - Verb string // Reference to common.verb.* - Dangerous bool // Requires confirmation - Default string // "yes" or "no" - Supports []string // Extra options like "all", "skip" -} -``` - -## CLI Integration - -The CLI package uses `T()` internally for prompts: - -```go -// Confirm uses T() internally -confirmed := cli.Confirm("core.delete", i18n.S("file", path)) -// Internally: result := i18n.T("core.delete", subject) -// Displays: result.Question + localized [y/N] -// Returns: bool - -// Question with options -choice := cli.Question("core.save", i18n.S("changes", 3).Count(3), cli.Options{ - Default: "yes", - Extra: []string{"all"}, -}) -// Displays: "Save 3 changes? 
[a/y/N]" -// Returns: "yes" | "no" | "all" - -// Choice from list -selected := cli.Choose("core.select.branch", branches) -// Displays localized prompt with arrow selection -``` - -### cli.Confirm() - -```go -func Confirm(intent string, subject *i18n.Subject, opts ...ConfirmOption) bool - -// Options -cli.DefaultYes() // Default to yes instead of no -cli.DefaultNo() // Explicit default no -cli.Required() // No default, must choose -cli.Timeout(30*time.Second) // Auto-select default after timeout -``` - -### cli.Question() - -```go -func Question(intent string, subject *i18n.Subject, opts ...QuestionOption) string - -// Options -cli.Extra("all", "skip") // Extra options beyond y/n -cli.Default("yes") // Which option is default -cli.Validate(func(s string) bool) // Custom validation -``` - -### cli.Choose() - -```go -func Choose[T any](intent string, items []T, opts ...ChooseOption) T - -// Options -cli.Display(func(T) string) // How to display each item -cli.Filter() // Enable fuzzy filtering -cli.Multi() // Allow multiple selection -``` - -## Reserved Namespaces - -### `common.*` - Reusable Fragments - -Atomic translation units that can be composed: - -```json -{ - "common": { - "verb": { - "edit": "edit", - "delete": "delete", - "create": "create", - "save": "save", - "update": "update", - "commit": "commit" - }, - "noun": { - "file": { "one": "file", "other": "files" }, - "commit": { "one": "commit", "other": "commits" }, - "change": { "one": "change", "other": "changes" } - }, - "article": { - "the": "the", - "a": { "one": "a", "vowel": "an" } - }, - "prompt": { - "yes": "y", - "no": "n", - "all": "a", - "skip": "s", - "quit": "q" - } - } -} -``` - -### `core.*` - Semantic Intents - -Intents encode meaning and behavior: - -```json -{ - "core": { - "edit": { - "_meta": { - "type": "action", - "verb": "common.verb.edit", - "dangerous": false - }, - "question": "Should I {{.Verb}} {{.Subject}}?", - "confirm": "{{.Verb | title}} {{.Subject}}?", - "success": 
"{{.Subject | title}} {{.Verb | past}}", - "failure": "Failed to {{.Verb}} {{.Subject}}" - }, - "delete": { - "_meta": { - "type": "action", - "verb": "common.verb.delete", - "dangerous": true, - "default": "no" - }, - "question": "Delete {{.Subject}}? This cannot be undone.", - "confirm": "Really delete {{.Subject}}?", - "success": "{{.Subject | title}} deleted", - "failure": "Failed to delete {{.Subject}}" - }, - "save": { - "_meta": { - "type": "action", - "verb": "common.verb.save", - "supports": ["all", "skip"] - }, - "question": "Save {{.Subject}}?", - "success": "{{.Subject | title}} saved" - }, - "commit": { - "_meta": { - "type": "action", - "verb": "common.verb.commit", - "dangerous": false - }, - "question": "Commit {{.Subject}}?", - "success": "{{.Subject | title}} committed", - "failure": "Failed to commit {{.Subject}}" - } - } -} -``` - -## Template Functions - -Available in translation templates: - -| Function | Description | Example | -|----------|-------------|---------| -| `title` | Title case | `{{.Name \| title}}` → "Hello World" | -| `lower` | Lower case | `{{.Name \| lower}}` → "hello world" | -| `upper` | Upper case | `{{.Name \| upper}}` → "HELLO WORLD" | -| `past` | Past tense verb | `{{.Verb \| past}}` → "edited" | -| `plural` | Pluralize noun | `{{.Noun \| plural .Count}}` → "files" | -| `article` | Add article | `{{.Noun \| article}}` → "a file" | -| `quote` | Wrap in quotes | `{{.Path \| quote}}` → `"/path/to/file"` | - -## Implementation Plan - -### Phase 1: Foundation -1. Define `Composed` and `Subject` types -2. Add `S()` / `Subject()` builder -3. Add `T()` / `C()` with intent resolution -4. Parse `_meta` from JSON -5. Add template functions (title, lower, past, etc.) - -### Phase 2: CLI Integration -1. Implement `cli.Confirm()` using intents -2. Implement `cli.Question()` with options -3. Implement `cli.Choose()` for lists -4. Localize prompt characters [y/N] → [j/N] etc. - -### Phase 3: Grammar Engine -1. 
Verb conjugation (past tense, etc.) -2. Noun plurality with irregular forms -3. Article selection (a/an, gender) -4. Language-specific rules - -### Phase 4: Extended Languages -1. Gender agreement (French, German, etc.) -2. Formality levels (Japanese, Korean, etc.) -3. Right-to-left support -4. Plural forms beyond one/other (Russian, Arabic, etc.) - -## Example: Full Flow - -```go -// In cmd/dev/dev_commit.go -path := "/Users/dev/project" -files := []string{"main.go", "config.yaml"} - -// Old way (hardcoded English, manual prompt handling) -fmt.Printf("Commit %d files in %s? [y/N] ", len(files), path) -var response string -fmt.Scanln(&response) -if response != "y" && response != "Y" { - return -} - -// New way (semantic, localized, integrated) -if !cli.Confirm("core.commit", i18n.S("file", path).Count(len(files))) { - return -} - -// For German user, displays: -// "2 Dateien in /Users/dev/project committen? [j/N]" -// (note: "j" for "ja" instead of "y" for "yes") -``` - -## JSON Schema - -```json -{ - "$schema": "https://json-schema.org/draft/2020-12/schema", - "type": "object", - "properties": { - "common": { - "description": "Reusable translation fragments", - "type": "object" - }, - "core": { - "description": "Semantic intents with metadata", - "type": "object", - "additionalProperties": { - "type": "object", - "properties": { - "_meta": { - "type": "object", - "properties": { - "type": { "enum": ["action", "question", "info"] }, - "verb": { "type": "string" }, - "dangerous": { "type": "boolean" }, - "default": { "enum": ["yes", "no"] }, - "supports": { "type": "array", "items": { "type": "string" } } - } - }, - "question": { "type": "string" }, - "confirm": { "type": "string" }, - "success": { "type": "string" }, - "failure": { "type": "string" } - } - } - } - } -} -``` - -## Grammar Fundamentals - -Parts of speech we need to handle: - -| Part | Role | Example | Transforms | -|------|------|---------|------------| -| **Verb** | Action | delete, save, commit | 
tense (past/present), mood (imperative) | -| **Noun** | Subject/Object | file, commit, user | plurality, gender, case | -| **Article** | Determiner | a/an, the | vowel-awareness, gender agreement | -| **Adjective** | Describes noun | modified, new, deleted | gender/number agreement | -| **Preposition** | Relation | in, from, to | - | - -### Verb Conjugation - -```json -{ - "common": { - "verb": { - "delete": { - "base": "delete", - "past": "deleted", - "gerund": "deleting", - "imperative": "delete" - } - } - } -} -``` - -For most English verbs, derive automatically: -- `past`: base + "ed" (or irregular lookup) -- `gerund`: base + "ing" - -### Noun Handling - -```json -{ - "common": { - "noun": { - "file": { - "one": "file", - "other": "files", - "gender": "neuter" - } - } - } -} -``` - -### Article Selection - -English: a/an based on next word's sound (not letter) -- "a file", "an item", "a user", "an hour" - -Other languages: gender agreement (der/die/das, le/la, etc.) - -## DX Improvements - -### 1. Compile-Time Validation -- `go generate` checks all `T("core.X")` calls have matching JSON keys -- Warns on missing `_meta` fields -- Type-checks template variables - -### 2. IDE Support -- JSON schema for autocomplete in translation files -- Go constants generated from JSON keys: `i18n.CoreDelete` instead of `"core.delete"` - -### 3. Fallback Chain -``` -T("core.delete", subject) - → try core.delete.question - → try core.delete (plain string) - → try common.action.delete - → return "Delete {{.Subject}}?" (hardcoded fallback) -``` - -### 4. Debug Mode -```go -i18n.Debug(true) // Shows: [core.delete] Delete file.txt? -``` - -### 5. Short Subject Syntax -```go -// Instead of: -i18n.T("core.delete", i18n.S("file", path)) - -// Allow: -i18n.T("core.delete", path) // Infers subject type from intent's expected noun -``` - -### 6. Fluent Chaining -```go -i18n.T("core.delete"). - Subject("file", path). - Count(3). 
- Question() // Returns just the question string -``` - -## Notes for Future Implementation - -- Use `github.com/gertd/go-pluralize` for English plurality -- Consider `github.com/nicksnyder/go-i18n` patterns for CLDR plural rules -- Store compiled templates in sync.Map for caching -- `_meta` parsing happens once at load time, not per-call -- CLI prompt chars from `common.prompt.*` - allows `[j/N]` for German - -## Open Questions - -1. **Verb conjugation library** - Use existing Go library or build custom? -2. **Gender detection** - How to infer gender for subjects in gendered languages? -3. **Fallback behavior** - What happens when intent metadata is missing? -4. **Caching** - Should compiled templates be cached? -5. **Validation** - How to validate intent definitions at build time? diff --git a/docs/plans/2026-01-31-semantic-cli-output.md b/docs/plans/2026-01-31-semantic-cli-output.md deleted file mode 100644 index 23f886c..0000000 --- a/docs/plans/2026-01-31-semantic-cli-output.md +++ /dev/null @@ -1,1685 +0,0 @@ -# Semantic CLI Output Abstraction - -> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. - -**Goal:** Zero external dependencies for CLI output. Consuming code only imports `cli` - no `fmt`, `i18n`, or `lipgloss`. - -**Restore Point:** `96eaed5` - all deleted code recoverable from git history. - -**Architecture:** -- Internal ANSI styling (~100 lines replaces lipgloss) -- Glyph system with themes (unicode/emoji/ascii) -- Semantic output functions (`cli.Success`, `cli.Error`, `cli.Progress`) -- HLCRF layout system for structured output (ported from RFC-001) -- Simple stdin prompts (replaces huh wizard) - -**Tech Stack:** Go standard library only. Zero external dependencies for CLI output. - -**Reference:** RFC-001-HLCRF-COMPOSITOR.md (lab/host.uk.com/doc/rfc/) - ---- - -## Design Decisions - -### 1. 
Explicit Styled Functions (NOT Prefix Detection) - -The codebase uses keys like `cmd.dev.ci.short`, not `i18n.success.*`. Instead of prefix detection, use explicit functions: - -```go -cli.Success("Build complete") // ✓ Build complete (green) -cli.Error("Connection failed") // ✗ Connection failed (red) -cli.Warn("Rate limited") // ⚠ Rate limited (amber) -cli.Info("Connecting...") // ℹ Connecting... (blue) - -// With i18n -cli.Success(i18n.T("build.complete")) // Caller handles translation -cli.Echo(key, args...) // Just translate + print, no styling -``` - -### 2. Delete-and-Replace Approach - -No backward compatibility. Delete all lipgloss-based code, rewrite with internal ANSI: -- Delete `var Style = struct {...}` namespace (output.go) -- Delete all 50+ helper functions (styles.go) -- Delete `Symbol*` constants - replaced by glyph system -- Delete `Table` struct - rewrite with internal styling - -### 3. Glyph System Replaces Symbol Constants - -```go -// Before (styles.go) -const SymbolCheck = "✓" -fmt.Print(SuccessStyle.Render(SymbolCheck)) - -// After -cli.Success("Done") // Internally uses Glyph(":check:") -cli.Print(":check: Done") // Or explicit glyph -``` - -### 4. Simple Wizard Prompts - -Replace huh forms with basic stdin: - -```go -cli.Prompt("Project name", "my-project") // text input -cli.Confirm("Continue?") // y/n -cli.Select("Choose", []string{"a", "b"}) // numbered list -``` - ---- - -## Phase -1: Zero-Dependency ANSI Styling - -### Why - -Current dependencies for ANSI escape codes: -- `lipgloss` → 15 transitive deps -- `huh` → 30 transitive deps -- Supply chain attack surface: ~45 packages - -What we actually use: `style.Bold(true).Foreground(color).Render(text)` - -This is ~100 lines of ANSI codes. We own it completely. 
- -### Task -1.1: ANSI Style Package - -**Files:** -- Create: `pkg/cli/ansi.go` - -**Step 1: Create ansi.go with complete implementation** - -```go -package cli - -import ( - "fmt" - "strconv" - "strings" -) - -// ANSI escape codes -const ( - ansiReset = "\033[0m" - ansiBold = "\033[1m" - ansiDim = "\033[2m" - ansiItalic = "\033[3m" - ansiUnderline = "\033[4m" -) - -// AnsiStyle represents terminal text styling. -// Use NewStyle() to create, chain methods, call Render(). -type AnsiStyle struct { - bold bool - dim bool - italic bool - underline bool - fg string - bg string -} - -// NewStyle creates a new empty style. -func NewStyle() *AnsiStyle { - return &AnsiStyle{} -} - -// Bold enables bold text. -func (s *AnsiStyle) Bold() *AnsiStyle { - s.bold = true - return s -} - -// Dim enables dim text. -func (s *AnsiStyle) Dim() *AnsiStyle { - s.dim = true - return s -} - -// Italic enables italic text. -func (s *AnsiStyle) Italic() *AnsiStyle { - s.italic = true - return s -} - -// Underline enables underlined text. -func (s *AnsiStyle) Underline() *AnsiStyle { - s.underline = true - return s -} - -// Foreground sets foreground color from hex string. -func (s *AnsiStyle) Foreground(hex string) *AnsiStyle { - s.fg = fgColorHex(hex) - return s -} - -// Background sets background color from hex string. -func (s *AnsiStyle) Background(hex string) *AnsiStyle { - s.bg = bgColorHex(hex) - return s -} - -// Render applies the style to text. 
-func (s *AnsiStyle) Render(text string) string { - if s == nil { - return text - } - - var codes []string - if s.bold { - codes = append(codes, ansiBold) - } - if s.dim { - codes = append(codes, ansiDim) - } - if s.italic { - codes = append(codes, ansiItalic) - } - if s.underline { - codes = append(codes, ansiUnderline) - } - if s.fg != "" { - codes = append(codes, s.fg) - } - if s.bg != "" { - codes = append(codes, s.bg) - } - - if len(codes) == 0 { - return text - } - - return strings.Join(codes, "") + text + ansiReset -} - -// Hex color support -func fgColorHex(hex string) string { - r, g, b := hexToRGB(hex) - return fmt.Sprintf("\033[38;2;%d;%d;%dm", r, g, b) -} - -func bgColorHex(hex string) string { - r, g, b := hexToRGB(hex) - return fmt.Sprintf("\033[48;2;%d;%d;%dm", r, g, b) -} - -func hexToRGB(hex string) (int, int, int) { - hex = strings.TrimPrefix(hex, "#") - if len(hex) != 6 { - return 255, 255, 255 - } - r, _ := strconv.ParseInt(hex[0:2], 16, 64) - g, _ := strconv.ParseInt(hex[2:4], 16, 64) - b, _ := strconv.ParseInt(hex[4:6], 16, 64) - return int(r), int(g), int(b) -} -``` - -**Step 2: Verify build** - -Run: `go build ./pkg/cli/...` -Expected: PASS - -**Step 3: Commit** - -```bash -git add pkg/cli/ansi.go -git commit -m "feat(cli): add zero-dependency ANSI styling - -Replaces lipgloss with ~100 lines of owned code. -Supports bold, dim, italic, underline, RGB/hex colors. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task -1.2: Rewrite styles.go - -**Files:** -- Rewrite: `pkg/cli/styles.go` (delete 672 lines, write ~150) - -**Step 1: Delete entire file content and rewrite** - -```go -// Package cli provides semantic CLI output with zero external dependencies. 
-package cli - -import ( - "fmt" - "strings" - "time" -) - -// Tailwind colour palette (hex strings) -const ( - ColourBlue50 = "#eff6ff" - ColourBlue100 = "#dbeafe" - ColourBlue200 = "#bfdbfe" - ColourBlue300 = "#93c5fd" - ColourBlue400 = "#60a5fa" - ColourBlue500 = "#3b82f6" - ColourBlue600 = "#2563eb" - ColourBlue700 = "#1d4ed8" - ColourGreen400 = "#4ade80" - ColourGreen500 = "#22c55e" - ColourGreen600 = "#16a34a" - ColourRed400 = "#f87171" - ColourRed500 = "#ef4444" - ColourRed600 = "#dc2626" - ColourAmber400 = "#fbbf24" - ColourAmber500 = "#f59e0b" - ColourAmber600 = "#d97706" - ColourOrange500 = "#f97316" - ColourYellow500 = "#eab308" - ColourEmerald500= "#10b981" - ColourPurple500 = "#a855f7" - ColourViolet400 = "#a78bfa" - ColourViolet500 = "#8b5cf6" - ColourIndigo500 = "#6366f1" - ColourCyan500 = "#06b6d4" - ColourGray50 = "#f9fafb" - ColourGray100 = "#f3f4f6" - ColourGray200 = "#e5e7eb" - ColourGray300 = "#d1d5db" - ColourGray400 = "#9ca3af" - ColourGray500 = "#6b7280" - ColourGray600 = "#4b5563" - ColourGray700 = "#374151" - ColourGray800 = "#1f2937" - ColourGray900 = "#111827" -) - -// Core styles -var ( - SuccessStyle = NewStyle().Bold().Foreground(ColourGreen500) - ErrorStyle = NewStyle().Bold().Foreground(ColourRed500) - WarningStyle = NewStyle().Bold().Foreground(ColourAmber500) - InfoStyle = NewStyle().Foreground(ColourBlue400) - DimStyle = NewStyle().Dim().Foreground(ColourGray500) - MutedStyle = NewStyle().Foreground(ColourGray600) - BoldStyle = NewStyle().Bold() - KeyStyle = NewStyle().Foreground(ColourGray400) - ValueStyle = NewStyle().Foreground(ColourGray200) - AccentStyle = NewStyle().Foreground(ColourCyan500) - LinkStyle = NewStyle().Foreground(ColourBlue500).Underline() - HeaderStyle = NewStyle().Bold().Foreground(ColourGray200) - TitleStyle = NewStyle().Bold().Foreground(ColourBlue500) - CodeStyle = NewStyle().Foreground(ColourGray300) - NumberStyle = NewStyle().Foreground(ColourBlue300) - RepoStyle = 
NewStyle().Bold().Foreground(ColourBlue500) -) - -// Truncate shortens a string to max length with ellipsis. -func Truncate(s string, max int) string { - if len(s) <= max { - return s - } - if max <= 3 { - return s[:max] - } - return s[:max-3] + "..." -} - -// Pad right-pads a string to width. -func Pad(s string, width int) string { - if len(s) >= width { - return s - } - return s + strings.Repeat(" ", width-len(s)) -} - -// FormatAge formats a time as human-readable age (e.g., "2h ago", "3d ago"). -func FormatAge(t time.Time) string { - d := time.Since(t) - switch { - case d < time.Minute: - return "just now" - case d < time.Hour: - return fmt.Sprintf("%dm ago", int(d.Minutes())) - case d < 24*time.Hour: - return fmt.Sprintf("%dh ago", int(d.Hours())) - case d < 7*24*time.Hour: - return fmt.Sprintf("%dd ago", int(d.Hours()/24)) - case d < 30*24*time.Hour: - return fmt.Sprintf("%dw ago", int(d.Hours()/(24*7))) - default: - return fmt.Sprintf("%dmo ago", int(d.Hours()/(24*30))) - } -} - -// Table renders tabular data with aligned columns. -// HLCRF is for layout; Table is for tabular data - they serve different purposes. -type Table struct { - Headers []string - Rows [][]string - Style TableStyle -} - -type TableStyle struct { - HeaderStyle *AnsiStyle - CellStyle *AnsiStyle - Separator string -} - -// DefaultTableStyle returns sensible defaults. -func DefaultTableStyle() TableStyle { - return TableStyle{ - HeaderStyle: HeaderStyle, - CellStyle: nil, - Separator: " ", - } -} - -// NewTable creates a table with headers. -func NewTable(headers ...string) *Table { - return &Table{ - Headers: headers, - Style: DefaultTableStyle(), - } -} - -// AddRow adds a row to the table. -func (t *Table) AddRow(cells ...string) *Table { - t.Rows = append(t.Rows, cells) - return t -} - -// String renders the table. 
-func (t *Table) String() string { - if len(t.Headers) == 0 && len(t.Rows) == 0 { - return "" - } - - // Calculate column widths - cols := len(t.Headers) - if cols == 0 && len(t.Rows) > 0 { - cols = len(t.Rows[0]) - } - widths := make([]int, cols) - - for i, h := range t.Headers { - if len(h) > widths[i] { - widths[i] = len(h) - } - } - for _, row := range t.Rows { - for i, cell := range row { - if i < cols && len(cell) > widths[i] { - widths[i] = len(cell) - } - } - } - - var sb strings.Builder - sep := t.Style.Separator - - // Headers - if len(t.Headers) > 0 { - for i, h := range t.Headers { - if i > 0 { - sb.WriteString(sep) - } - styled := Pad(h, widths[i]) - if t.Style.HeaderStyle != nil { - styled = t.Style.HeaderStyle.Render(styled) - } - sb.WriteString(styled) - } - sb.WriteString("\n") - } - - // Rows - for _, row := range t.Rows { - for i, cell := range row { - if i > 0 { - sb.WriteString(sep) - } - styled := Pad(cell, widths[i]) - if t.Style.CellStyle != nil { - styled = t.Style.CellStyle.Render(styled) - } - sb.WriteString(styled) - } - sb.WriteString("\n") - } - - return sb.String() -} - -// Render prints the table to stdout. -func (t *Table) Render() { - fmt.Print(t.String()) -} -``` - -**Step 2: Verify build** - -Run: `go build ./pkg/cli/...` -Expected: PASS - -**Step 3: Commit** - -```bash -git add pkg/cli/styles.go -git commit -m "refactor(cli): rewrite styles with zero-dep ANSI - -Deletes 672 lines of lipgloss code, replaces with ~150 lines. -Previous code available at 96eaed5 if needed. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task -1.3: Rewrite output.go - -**Files:** -- Rewrite: `pkg/cli/output.go` (delete Style namespace, add semantic functions) - -**Step 1: Delete entire file content and rewrite** - -```go -package cli - -import ( - "fmt" - - "github.com/host-uk/core/pkg/i18n" -) - -// Blank prints an empty line. -func Blank() { - fmt.Println() -} - -// Echo translates a key via i18n.T and prints with newline. 
-// No automatic styling - use Success/Error/Warn/Info for styled output. -func Echo(key string, args ...any) { - fmt.Println(i18n.T(key, args...)) -} - -// Print outputs formatted text (no newline). -// Glyph shortcodes like :check: are converted. -func Print(format string, args ...any) { - fmt.Print(compileGlyphs(fmt.Sprintf(format, args...))) -} - -// Println outputs formatted text with newline. -// Glyph shortcodes like :check: are converted. -func Println(format string, args ...any) { - fmt.Println(compileGlyphs(fmt.Sprintf(format, args...))) -} - -// Success prints a success message with checkmark (green). -func Success(msg string) { - fmt.Println(SuccessStyle.Render(Glyph(":check:") + " " + msg)) -} - -// Successf prints a formatted success message. -func Successf(format string, args ...any) { - Success(fmt.Sprintf(format, args...)) -} - -// Error prints an error message with cross (red). -func Error(msg string) { - fmt.Println(ErrorStyle.Render(Glyph(":cross:") + " " + msg)) -} - -// Errorf prints a formatted error message. -func Errorf(format string, args ...any) { - Error(fmt.Sprintf(format, args...)) -} - -// Warn prints a warning message with warning symbol (amber). -func Warn(msg string) { - fmt.Println(WarningStyle.Render(Glyph(":warn:") + " " + msg)) -} - -// Warnf prints a formatted warning message. -func Warnf(format string, args ...any) { - Warn(fmt.Sprintf(format, args...)) -} - -// Info prints an info message with info symbol (blue). -func Info(msg string) { - fmt.Println(InfoStyle.Render(Glyph(":info:") + " " + msg)) -} - -// Infof prints a formatted info message. -func Infof(format string, args ...any) { - Info(fmt.Sprintf(format, args...)) -} - -// Dim prints dimmed text. -func Dim(msg string) { - fmt.Println(DimStyle.Render(msg)) -} - -// Progress prints a progress indicator that overwrites the current line. -// Uses i18n.Progress for gerund form ("Checking..."). 
-func Progress(verb string, current, total int, item ...string) { - msg := i18n.Progress(verb) - if len(item) > 0 && item[0] != "" { - fmt.Printf("\033[2K\r%s %d/%d %s", DimStyle.Render(msg), current, total, item[0]) - } else { - fmt.Printf("\033[2K\r%s %d/%d", DimStyle.Render(msg), current, total) - } -} - -// ProgressDone clears the progress line. -func ProgressDone() { - fmt.Print("\033[2K\r") -} - -// Label prints a "Label: value" line. -func Label(word, value string) { - fmt.Printf("%s %s\n", KeyStyle.Render(i18n.Label(word)), value) -} - -// Scanln reads from stdin. -func Scanln(a ...any) (int, error) { - return fmt.Scanln(a...) -} -``` - -**Step 2: Verify build** - -Run: `go build ./pkg/cli/...` -Expected: PASS - -**Step 3: Commit** - -```bash -git add pkg/cli/output.go -git commit -m "refactor(cli): rewrite output with semantic functions - -Replaces Style namespace with explicit Success/Error/Warn/Info. -Previous code available at 96eaed5 if needed. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task -1.4: Rewrite strings.go - -**Files:** -- Rewrite: `pkg/cli/strings.go` (remove lipgloss import) - -**Step 1: Delete and rewrite** - -```go -package cli - -import "fmt" - -// Sprintf formats a string (fmt.Sprintf wrapper). -func Sprintf(format string, args ...any) string { - return fmt.Sprintf(format, args...) -} - -// Sprint formats using default formats (fmt.Sprint wrapper). -func Sprint(args ...any) string { - return fmt.Sprint(args...) -} - -// Styled returns text with a style applied. -func Styled(style *AnsiStyle, text string) string { - return style.Render(text) -} - -// Styledf returns formatted text with a style applied. -func Styledf(style *AnsiStyle, format string, args ...any) string { - return style.Render(fmt.Sprintf(format, args...)) -} - -// SuccessStr returns success-styled string. -func SuccessStr(msg string) string { - return SuccessStyle.Render(Glyph(":check:") + " " + msg) -} - -// ErrorStr returns error-styled string. 
-func ErrorStr(msg string) string { - return ErrorStyle.Render(Glyph(":cross:") + " " + msg) -} - -// WarnStr returns warning-styled string. -func WarnStr(msg string) string { - return WarningStyle.Render(Glyph(":warn:") + " " + msg) -} - -// InfoStr returns info-styled string. -func InfoStr(msg string) string { - return InfoStyle.Render(Glyph(":info:") + " " + msg) -} - -// DimStr returns dim-styled string. -func DimStr(msg string) string { - return DimStyle.Render(msg) -} -``` - -**Step 2: Verify build** - -Run: `go build ./pkg/cli/...` -Expected: PASS - -**Step 3: Commit** - -```bash -git add pkg/cli/strings.go -git commit -m "refactor(cli): rewrite strings with zero-dep styling - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task -1.5: Update errors.go - -**Files:** -- Modify: `pkg/cli/errors.go` - -**Step 1: Replace SymbolCross with Glyph** - -```go -// Before -fmt.Println(ErrorStyle.Render(SymbolCross + " " + msg)) - -// After -fmt.Println(ErrorStyle.Render(Glyph(":cross:") + " " + msg)) -``` - -Apply to: `Fatalf`, `FatalWrap`, `FatalWrapVerb` - -**Step 2: Verify build** - -Run: `go build ./pkg/cli/...` -Expected: PASS - -**Step 3: Commit** - -```bash -git add pkg/cli/errors.go -git commit -m "refactor(cli): update errors to use glyph system - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task -1.6: Migrate pkg/php and pkg/vm - -**Files:** -- Modify: `pkg/php/cmd_quality.go` -- Modify: `pkg/php/cmd_dev.go` -- Modify: `pkg/php/cmd.go` -- Modify: `pkg/vm/cmd_vm.go` - -**Step 1: Replace lipgloss imports with cli** - -In each file: -- Remove `"github.com/charmbracelet/lipgloss"` import -- Replace `lipgloss.NewStyle()...` with `cli.NewStyle()...` -- Replace colour references: `lipgloss.Color(...)` → hex string - -**Step 2: Verify build** - -Run: `go build ./pkg/php/... 
./pkg/vm/...` -Expected: PASS - -**Step 3: Commit** - -```bash -git add pkg/php/*.go pkg/vm/*.go -git commit -m "refactor(php,vm): migrate to cli ANSI styling - -Removes direct lipgloss imports. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task -1.7: Simple Wizard Prompts - -**Files:** -- Create: `pkg/cli/prompt.go` -- Rewrite: `pkg/setup/cmd_wizard.go` - -**Step 1: Create prompt.go** - -```go -package cli - -import ( - "bufio" - "fmt" - "os" - "strconv" - "strings" -) - -var stdin = bufio.NewReader(os.Stdin) - -// Prompt asks for text input with a default value. -func Prompt(label, defaultVal string) (string, error) { - if defaultVal != "" { - fmt.Printf("%s [%s]: ", label, defaultVal) - } else { - fmt.Printf("%s: ", label) - } - - input, err := stdin.ReadString('\n') - if err != nil { - return "", err - } - - input = strings.TrimSpace(input) - if input == "" { - return defaultVal, nil - } - return input, nil -} - -// Confirm asks a yes/no question. -func Confirm(label string) (bool, error) { - fmt.Printf("%s [y/N]: ", label) - - input, err := stdin.ReadString('\n') - if err != nil { - return false, err - } - - input = strings.ToLower(strings.TrimSpace(input)) - return input == "y" || input == "yes", nil -} - -// Select presents numbered options and returns the selected value. -func Select(label string, options []string) (string, error) { - fmt.Println(label) - for i, opt := range options { - fmt.Printf(" %d. %s\n", i+1, opt) - } - fmt.Printf("Choose [1-%d]: ", len(options)) - - input, err := stdin.ReadString('\n') - if err != nil { - return "", err - } - - n, err := strconv.Atoi(strings.TrimSpace(input)) - if err != nil || n < 1 || n > len(options) { - return "", fmt.Errorf("invalid selection") - } - return options[n-1], nil -} - -// MultiSelect presents checkboxes (space-separated numbers). -func MultiSelect(label string, options []string) ([]string, error) { - fmt.Println(label) - for i, opt := range options { - fmt.Printf(" %d. 
%s\n", i+1, opt) - } - fmt.Printf("Choose (space-separated) [1-%d]: ", len(options)) - - input, err := stdin.ReadString('\n') - if err != nil { - return nil, err - } - - var selected []string - for _, s := range strings.Fields(input) { - n, err := strconv.Atoi(s) - if err != nil || n < 1 || n > len(options) { - continue - } - selected = append(selected, options[n-1]) - } - return selected, nil -} -``` - -**Step 2: Rewrite cmd_wizard.go to use simple prompts** - -Remove huh import, replace form calls with cli.Prompt/Confirm/Select/MultiSelect. - -**Step 3: Verify build** - -Run: `go build ./pkg/cli/... ./pkg/setup/...` -Expected: PASS - -**Step 4: Commit** - -```bash -git add pkg/cli/prompt.go pkg/setup/cmd_wizard.go -git commit -m "refactor(setup): replace huh with simple stdin prompts - -Removes ~30 transitive dependencies. -Previous wizard at 96eaed5 if needed. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task -1.8: Remove Charmbracelet from go.mod - -**Step 1: Run go mod tidy** - -```bash -go mod tidy -``` - -**Step 2: Verify no charmbracelet deps remain** - -Run: `grep charmbracelet go.mod` -Expected: No output - -**Step 3: Check binary size reduction** - -```bash -go build -o /tmp/core-new ./cmd/core-cli -ls -lh /tmp/core-new -``` - -**Step 4: Commit** - -```bash -git add go.mod go.sum -git commit -m "chore: remove charmbracelet dependencies - -Zero external dependencies for CLI output. -Binary size reduced. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -## Phase 0: HLCRF Layout System - -### Task 0.1: Layout Parser - -**Files:** -- Create: `pkg/cli/layout.go` - -**Step 1: Create layout.go** - -```go -package cli - -import "fmt" - -// Region represents one of the 5 HLCRF regions. -type Region rune - -const ( - RegionHeader Region = 'H' - RegionLeft Region = 'L' - RegionContent Region = 'C' - RegionRight Region = 'R' - RegionFooter Region = 'F' -) - -// Composite represents an HLCRF layout node. 
-type Composite struct { - variant string - path string - regions map[Region]*Slot - parent *Composite -} - -// Slot holds content for a region. -type Slot struct { - region Region - path string - blocks []Renderable - child *Composite -} - -// Renderable is anything that can be rendered to terminal. -type Renderable interface { - Render() string -} - -// StringBlock is a simple string that implements Renderable. -type StringBlock string - -func (s StringBlock) Render() string { return string(s) } - -// Layout creates a new layout from a variant string. -func Layout(variant string) *Composite { - c, err := ParseVariant(variant) - if err != nil { - return &Composite{variant: variant, regions: make(map[Region]*Slot)} - } - return c -} - -// ParseVariant parses a variant string like "H[LC]C[HCF]F". -func ParseVariant(variant string) (*Composite, error) { - c := &Composite{ - variant: variant, - path: "", - regions: make(map[Region]*Slot), - } - - i := 0 - for i < len(variant) { - r := Region(variant[i]) - if !isValidRegion(r) { - return nil, fmt.Errorf("invalid region: %c", r) - } - - slot := &Slot{region: r, path: string(r)} - c.regions[r] = slot - i++ - - if i < len(variant) && variant[i] == '[' { - end := findMatchingBracket(variant, i) - if end == -1 { - return nil, fmt.Errorf("unmatched bracket at %d", i) - } - nested, err := ParseVariant(variant[i+1 : end]) - if err != nil { - return nil, err - } - nested.path = string(r) + "-" - nested.parent = c - slot.child = nested - i = end + 1 - } - } - return c, nil -} - -func isValidRegion(r Region) bool { - return r == 'H' || r == 'L' || r == 'C' || r == 'R' || r == 'F' -} - -func findMatchingBracket(s string, start int) int { - depth := 0 - for i := start; i < len(s); i++ { - if s[i] == '[' { - depth++ - } else if s[i] == ']' { - depth-- - if depth == 0 { - return i - } - } - } - return -1 -} - -// H adds content to Header region. 
-func (c *Composite) H(items ...any) *Composite { c.addToRegion(RegionHeader, items...); return c } - -// L adds content to Left region. -func (c *Composite) L(items ...any) *Composite { c.addToRegion(RegionLeft, items...); return c } - -// C adds content to Content region. -func (c *Composite) C(items ...any) *Composite { c.addToRegion(RegionContent, items...); return c } - -// R adds content to Right region. -func (c *Composite) R(items ...any) *Composite { c.addToRegion(RegionRight, items...); return c } - -// F adds content to Footer region. -func (c *Composite) F(items ...any) *Composite { c.addToRegion(RegionFooter, items...); return c } - -func (c *Composite) addToRegion(r Region, items ...any) { - slot, ok := c.regions[r] - if !ok { - return - } - for _, item := range items { - slot.blocks = append(slot.blocks, toRenderable(item)) - } -} - -func toRenderable(item any) Renderable { - switch v := item.(type) { - case Renderable: - return v - case string: - return StringBlock(v) - default: - return StringBlock(fmt.Sprint(v)) - } -} -``` - -**Step 2: Verify build** - -Run: `go build ./pkg/cli/...` -Expected: PASS - -**Step 3: Commit** - -```bash -git add pkg/cli/layout.go -git commit -m "feat(cli): add HLCRF layout parser - -Implements RFC-001 compositor pattern for terminal output. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 0.2: Terminal Renderer - -**Files:** -- Create: `pkg/cli/render.go` - -**Step 1: Create render.go** - -```go -package cli - -import ( - "fmt" - "strings" -) - -// RenderStyle controls how layouts are rendered. -type RenderStyle int - -const ( - RenderFlat RenderStyle = iota // No borders - RenderSimple // --- separators - RenderBoxed // Unicode box drawing -) - -var currentRenderStyle = RenderFlat - -func UseRenderFlat() { currentRenderStyle = RenderFlat } -func UseRenderSimple() { currentRenderStyle = RenderSimple } -func UseRenderBoxed() { currentRenderStyle = RenderBoxed } - -// Render outputs the layout to terminal. 
-func (c *Composite) Render() { - fmt.Print(c.String()) -} - -// String returns the rendered layout. -func (c *Composite) String() string { - var sb strings.Builder - c.renderTo(&sb, 0) - return sb.String() -} - -func (c *Composite) renderTo(sb *strings.Builder, depth int) { - order := []Region{RegionHeader, RegionLeft, RegionContent, RegionRight, RegionFooter} - - var active []Region - for _, r := range order { - if slot, ok := c.regions[r]; ok { - if len(slot.blocks) > 0 || slot.child != nil { - active = append(active, r) - } - } - } - - for i, r := range active { - slot := c.regions[r] - if i > 0 && currentRenderStyle != RenderFlat { - c.renderSeparator(sb, depth) - } - c.renderSlot(sb, slot, depth) - } -} - -func (c *Composite) renderSeparator(sb *strings.Builder, depth int) { - indent := strings.Repeat(" ", depth) - switch currentRenderStyle { - case RenderBoxed: - sb.WriteString(indent + "├" + strings.Repeat("─", 40) + "┤\n") - case RenderSimple: - sb.WriteString(indent + strings.Repeat("─", 40) + "\n") - } -} - -func (c *Composite) renderSlot(sb *strings.Builder, slot *Slot, depth int) { - indent := strings.Repeat(" ", depth) - for _, block := range slot.blocks { - for _, line := range strings.Split(block.Render(), "\n") { - if line != "" { - sb.WriteString(indent + line + "\n") - } - } - } - if slot.child != nil { - slot.child.renderTo(sb, depth+1) - } -} -``` - -**Step 2: Verify build** - -Run: `go build ./pkg/cli/...` -Expected: PASS - -**Step 3: Commit** - -```bash -git add pkg/cli/render.go -git commit -m "feat(cli): add HLCRF terminal renderer - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -## Phase 1: Glyph System - -### Task 1.1: Glyph Core - -**Files:** -- Create: `pkg/cli/glyph.go` - -**Step 1: Create glyph.go** - -```go -package cli - -import ( - "bytes" - "unicode" -) - -// GlyphTheme defines which symbols to use. 
-type GlyphTheme int - -const ( - ThemeUnicode GlyphTheme = iota - ThemeEmoji - ThemeASCII -) - -var currentTheme = ThemeUnicode - -func UseUnicode() { currentTheme = ThemeUnicode } -func UseEmoji() { currentTheme = ThemeEmoji } -func UseASCII() { currentTheme = ThemeASCII } - -func glyphMap() map[string]string { - switch currentTheme { - case ThemeEmoji: - return glyphMapEmoji - case ThemeASCII: - return glyphMapASCII - default: - return glyphMapUnicode - } -} - -// Glyph converts a shortcode to its symbol. -func Glyph(code string) string { - if sym, ok := glyphMap()[code]; ok { - return sym - } - return code -} - -func compileGlyphs(x string) string { - if x == "" { - return "" - } - input := bytes.NewBufferString(x) - output := bytes.NewBufferString("") - - for { - r, _, err := input.ReadRune() - if err != nil { - break - } - if r == ':' { - output.WriteString(replaceGlyph(input)) - } else { - output.WriteRune(r) - } - } - return output.String() -} - -func replaceGlyph(input *bytes.Buffer) string { - code := bytes.NewBufferString(":") - for { - r, _, err := input.ReadRune() - if err != nil { - return code.String() - } - if r == ':' && code.Len() == 1 { - return code.String() + replaceGlyph(input) - } - code.WriteRune(r) - if unicode.IsSpace(r) { - return code.String() - } - if r == ':' { - return Glyph(code.String()) - } - } -} -``` - -**Step 2: Verify build** - -Run: `go build ./pkg/cli/...` -Expected: PASS - -**Step 3: Commit** - -```bash -git add pkg/cli/glyph.go -git commit -m "feat(cli): add glyph shortcode system - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 1.2: Glyph Maps - -**Files:** -- Create: `pkg/cli/glyph_maps.go` - -**Step 1: Create glyph_maps.go** - -```go -package cli - -var glyphMapUnicode = map[string]string{ - ":check:": "✓", ":cross:": "✗", ":warn:": "⚠", ":info:": "ℹ", - ":question:": "?", ":skip:": "○", ":dot:": "●", ":circle:": "◯", - ":arrow_right:": "→", ":arrow_left:": "←", ":arrow_up:": "↑", ":arrow_down:": "↓", - 
":pointer:": "▶", ":bullet:": "•", ":dash:": "─", ":pipe:": "│", - ":corner:": "└", ":tee:": "├", ":pending:": "…", ":spinner:": "⠋", -} - -var glyphMapEmoji = map[string]string{ - ":check:": "✅", ":cross:": "❌", ":warn:": "⚠️", ":info:": "ℹ️", - ":question:": "❓", ":skip:": "⏭️", ":dot:": "🔵", ":circle:": "⚪", - ":arrow_right:": "➡️", ":arrow_left:": "⬅️", ":arrow_up:": "⬆️", ":arrow_down:": "⬇️", - ":pointer:": "▶️", ":bullet:": "•", ":dash:": "─", ":pipe:": "│", - ":corner:": "└", ":tee:": "├", ":pending:": "⏳", ":spinner:": "🔄", -} - -var glyphMapASCII = map[string]string{ - ":check:": "[OK]", ":cross:": "[FAIL]", ":warn:": "[WARN]", ":info:": "[INFO]", - ":question:": "[?]", ":skip:": "[SKIP]", ":dot:": "[*]", ":circle:": "[ ]", - ":arrow_right:": "->", ":arrow_left:": "<-", ":arrow_up:": "^", ":arrow_down:": "v", - ":pointer:": ">", ":bullet:": "*", ":dash:": "-", ":pipe:": "|", - ":corner:": "`", ":tee:": "+", ":pending:": "...", ":spinner:": "-", -} -``` - -**Step 2: Verify build** - -Run: `go build ./pkg/cli/...` -Expected: PASS - -**Step 3: Commit** - -```bash -git add pkg/cli/glyph_maps.go -git commit -m "feat(cli): add glyph maps for unicode/emoji/ascii - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -## Phase 2: DX-Focused Semantic Output - -### Task 2.0: Semantic Patterns for Consuming Packages - -**Files:** -- Create: `pkg/cli/check.go` -- Modify: `pkg/cli/output.go` - -**Goal:** Eliminate display logic from consuming packages. Only `cli` knows about styling. - -**Step 1: Create check.go with fluent Check builder** - -```go -package cli - -import "fmt" - -// CheckBuilder provides fluent API for check results. -type CheckBuilder struct { - name string - status string - style *AnsiStyle - icon string - duration string -} - -// Check starts building a check result line. 
-// -// cli.Check("audit").Pass() -// cli.Check("fmt").Fail().Duration("2.3s") -// cli.Check("test").Skip() -func Check(name string) *CheckBuilder { - return &CheckBuilder{name: name} -} - -// Pass marks the check as passed. -func (c *CheckBuilder) Pass() *CheckBuilder { - c.status = "passed" - c.style = SuccessStyle - c.icon = Glyph(":check:") - return c -} - -// Fail marks the check as failed. -func (c *CheckBuilder) Fail() *CheckBuilder { - c.status = "failed" - c.style = ErrorStyle - c.icon = Glyph(":cross:") - return c -} - -// Skip marks the check as skipped. -func (c *CheckBuilder) Skip() *CheckBuilder { - c.status = "skipped" - c.style = DimStyle - c.icon = "-" - return c -} - -// Warn marks the check as warning. -func (c *CheckBuilder) Warn() *CheckBuilder { - c.status = "warning" - c.style = WarningStyle - c.icon = Glyph(":warn:") - return c -} - -// Duration adds duration to the check result. -func (c *CheckBuilder) Duration(d string) *CheckBuilder { - c.duration = d - return c -} - -// Message adds a custom message instead of status. -func (c *CheckBuilder) Message(msg string) *CheckBuilder { - c.status = msg - return c -} - -// String returns the formatted check line. -func (c *CheckBuilder) String() string { - icon := c.icon - if c.style != nil { - icon = c.style.Render(c.icon) - } - - status := c.status - if c.style != nil && c.status != "" { - status = c.style.Render(c.status) - } - - if c.duration != "" { - return fmt.Sprintf(" %s %-20s %-10s %s", icon, c.name, status, DimStyle.Render(c.duration)) - } - if status != "" { - return fmt.Sprintf(" %s %s %s", icon, c.name, status) - } - return fmt.Sprintf(" %s %s", icon, c.name) -} - -// Print outputs the check result. -func (c *CheckBuilder) Print() { - fmt.Println(c.String()) -} -``` - -**Step 2: Add semantic output functions to output.go** - -```go -// Task prints a task header: "[label] message" -// -// cli.Task("php", "Running tests...") // [php] Running tests... 
-// cli.Task("go", i18n.Progress("build")) // [go] Building... -func Task(label, message string) { - fmt.Printf("%s %s\n\n", DimStyle.Render("["+label+"]"), message) -} - -// Section prints a section header: "── SECTION ──" -// -// cli.Section("audit") // ── AUDIT ── -func Section(name string) { - header := "── " + strings.ToUpper(name) + " ──" - fmt.Println(AccentStyle.Render(header)) -} - -// Hint prints a labelled hint: "label: message" -// -// cli.Hint("install", "composer require vimeo/psalm") -// cli.Hint("fix", "core php fmt --fix") -func Hint(label, message string) { - fmt.Printf(" %s %s\n", DimStyle.Render(label+":"), message) -} - -// Severity prints a severity-styled message. -// -// cli.Severity("critical", "SQL injection") // red, bold -// cli.Severity("high", "XSS vulnerability") // orange -// cli.Severity("medium", "Missing CSRF") // amber -// cli.Severity("low", "Debug enabled") // gray -func Severity(level, message string) { - var style *AnsiStyle - switch strings.ToLower(level) { - case "critical": - style = NewStyle().Bold().Foreground(ColourRed500) - case "high": - style = NewStyle().Bold().Foreground(ColourOrange500) - case "medium": - style = NewStyle().Foreground(ColourAmber500) - case "low": - style = NewStyle().Foreground(ColourGray500) - default: - style = DimStyle - } - fmt.Printf(" %s %s\n", style.Render("["+level+"]"), message) -} - -// Result prints a result line: "✓ message" or "✗ message" -// -// cli.Result(passed, "All tests passed") -// cli.Result(false, "3 tests failed") -func Result(passed bool, message string) { - if passed { - Success(message) - } else { - Error(message) - } -} -``` - -**Step 3: Add strings import to output.go** - -```go -import ( - "fmt" - "strings" - - "github.com/host-uk/core/pkg/i18n" -) -``` - -**Step 4: Verify build** - -Run: `go build ./pkg/cli/...` -Expected: PASS - -**Step 5: Commit** - -```bash -git add pkg/cli/check.go pkg/cli/output.go -git commit -m "feat(cli): add DX-focused semantic output 
patterns - -- Check() fluent builder for check results -- Task() for task headers -- Section() for section headers -- Hint() for labelled hints -- Severity() for severity-styled output -- Result() for pass/fail results - -Consuming packages now have zero display logic. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -## Phase 3: Full Migration - -### Task 3.1: Migrate All pkg/* Files - -**Files:** All files in pkg/ that use: -- `i18n.T()` directly (should use `cli.Echo()`) -- `lipgloss.*` (should use `cli.*Style`) -- `fmt.Printf/Println` for output (should use `cli.Print/Println`) - -**Step 1: Find all files needing migration** - -```bash -grep -r "i18n\.T\|lipgloss\|fmt\.Print" pkg/ --include="*.go" | grep -v "pkg/cli/" | grep -v "_test.go" -``` - -**Step 2: Migrate each file** - -Pattern replacements: -- `fmt.Printf(...)` → `cli.Print(...)` -- `fmt.Println(...)` → `cli.Println(...)` -- `i18n.T("key")` → `cli.Echo("key")` or keep for values -- `successStyle.Render(...)` → `cli.SuccessStyle.Render(...)` - -**Step 3: Verify build** - -Run: `go build ./...` -Expected: PASS - -**Step 4: Commit** - -```bash -git add pkg/ -git commit -m "refactor: migrate all pkg/* to cli abstraction - -No direct fmt/i18n/lipgloss imports outside pkg/cli. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 3.2: Tests - -**Files:** -- Create: `pkg/cli/ansi_test.go` -- Create: `pkg/cli/glyph_test.go` -- Create: `pkg/cli/layout_test.go` - -**Step 1: Write tests** - -```go -// ansi_test.go -package cli - -import "testing" - -func TestAnsiStyle_Render(t *testing.T) { - s := NewStyle().Bold().Foreground("#ff0000") - got := s.Render("test") - if got == "test" { - t.Error("Expected styled output") - } - if !contains(got, "test") { - t.Error("Output should contain text") - } -} - -func contains(s, sub string) bool { - return len(s) >= len(sub) && s[len(s)-len(sub)-4:len(s)-4] == sub -} -``` - -**Step 2: Run tests** - -Run: `go test ./pkg/cli/... 
-v` -Expected: PASS - -**Step 3: Commit** - -```bash -git add pkg/cli/*_test.go -git commit -m "test(cli): add unit tests for ANSI, glyph, layout - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 3.3: Final Verification - -**Step 1: Full build** - -Run: `go build ./...` -Expected: PASS - -**Step 2: All tests** - -Run: `go test ./...` -Expected: PASS - -**Step 3: Verify zero charmbracelet** - -Run: `grep charmbracelet go.mod` -Expected: No output - -**Step 4: Binary test** - -Run: `./bin/core dev health` -Expected: Output displays correctly - ---- - -## Summary of New API - -| Function | Purpose | -|----------|---------| -| `cli.Blank()` | Empty line | -| `cli.Echo(key, args...)` | Translate + print | -| `cli.Print(fmt, args...)` | Printf with glyphs | -| `cli.Println(fmt, args...)` | Println with glyphs | -| `cli.Success(msg)` | ✓ green | -| `cli.Error(msg)` | ✗ red | -| `cli.Warn(msg)` | ⚠ amber | -| `cli.Info(msg)` | ℹ blue | -| `cli.Dim(msg)` | Dimmed text | -| `cli.Progress(verb, n, total)` | Overwriting progress | -| `cli.ProgressDone()` | Clear progress | -| `cli.Label(word, value)` | "Label: value" | -| `cli.Prompt(label, default)` | Text input | -| `cli.Confirm(label)` | y/n | -| `cli.Select(label, opts)` | Numbered list | -| `cli.MultiSelect(label, opts)` | Multi-select | -| `cli.Glyph(code)` | Get symbol | -| `cli.UseUnicode/Emoji/ASCII()` | Set theme | -| `cli.Layout(variant)` | HLCRF layout | -| `cli.NewTable(headers...)` | Create table | -| `cli.FormatAge(time)` | "2h ago" | -| `cli.Truncate(s, max)` | Ellipsis truncation | -| `cli.Pad(s, width)` | Right-pad string | -| **DX Patterns** | | -| `cli.Task(label, msg)` | `[php] Running...` | -| `cli.Section(name)` | `── AUDIT ──` | -| `cli.Check(name).Pass/Fail/Skip()` | Fluent check result | -| `cli.Hint(label, msg)` | `install: composer...` | -| `cli.Severity(level, msg)` | Critical/high/med/low | -| `cli.Result(ok, msg)` | Pass/fail result | diff --git 
a/docs/plans/2026-03-09-lint-pattern-catalog-design.md b/docs/plans/2026-03-09-lint-pattern-catalog-design.md new file mode 100644 index 0000000..0825791 --- /dev/null +++ b/docs/plans/2026-03-09-lint-pattern-catalog-design.md @@ -0,0 +1,261 @@ +# Lint Pattern Catalog & Polish Skill Design + +> **Partial implementation (14 Mar 2026):** Layer 1 (`core/lint` -- catalog, matcher, scanner, CLI) is fully implemented and documented at `docs/tools/lint/index.md`. Layer 2 (MCP subsystem in `go-ai`) and Layer 3 (Claude Code polish skill in `core/agent`) are NOT implemented. This plan is retained for those remaining layers. + +**Goal:** A structured pattern catalog (`core/lint`) that captures recurring code quality findings as regex rules, exposes them via MCP tools in `go-ai`, and orchestrates multi-AI code review via a Claude Code skill in `core/agent`. + +**Architecture:** Three layers — a standalone catalog+matcher library (`core/lint`), an MCP subsystem in `go-ai` that exposes lint tools to agents, and a Claude Code plugin in `core/agent` that orchestrates the "polish" workflow (deterministic checks + AI reviewers + feedback loop into the catalog). + +**Tech Stack:** Go (catalog, matcher, CLI, MCP subsystem), YAML (rule definitions), JSONL (findings output, compatible with `~/.core/ai/metrics/`), Claude Code plugin format (hooks.json, commands/*.md, plugin.json). + +--- + +## Context + +During a code review sweep of 18 Go repos (March 2026), AI reviewers (Gemini, Claude) found ~20 recurring patterns: SQL injection, path traversal, XSS, missing constant-time comparison, goroutine leaks, Go 1.26 modernisation opportunities, and more. Many of these patterns repeat across repos. + +Currently these findings exist only as commit messages. This design captures them as a reusable, machine-readable catalog that: +1. Deterministic tools can run immediately (regex matching) +2. MCP-connected agents can query and apply +3. 
LEM models can train on for "does this comply with CoreGo standards?" judgements +4. Grows automatically as AI reviewers find new patterns + +## Layer 1: `core/lint` — Pattern Catalog & Matcher + +### Repository Structure + +``` +core/lint/ +├── go.mod # forge.lthn.ai/core/lint +├── catalog/ +│ ├── go-security.yaml # SQL injection, path traversal, XSS, constant-time +│ ├── go-modernise.yaml # Go 1.26: slices.Clone, wg.Go, maps.Keys, range-over-int +│ ├── go-correctness.yaml # Deadlocks, goroutine leaks, nil guards, error handling +│ ├── php-security.yaml # XSS, CSRF, mass assignment, SQL injection +│ ├── ts-security.yaml # DOM XSS, prototype pollution +│ └── cpp-safety.yaml # Buffer overflow, use-after-free +├── pkg/lint/ +│ ├── catalog.go # Load + parse YAML catalog files +│ ├── rule.go # Rule struct definition +│ ├── matcher.go # Regex matcher against file contents +│ ├── report.go # Structured findings output (JSON/JSONL/text) +│ ├── catalog_test.go +│ ├── matcher_test.go +│ └── report_test.go +├── cmd/core-lint/ +│ └── main.go # `core-lint check ./...` CLI +└── .core/ + └── build.yaml # Produces core-lint binary +``` + +### Rule Schema (YAML) + +```yaml +- id: go-sec-001 + title: "SQL wildcard injection in LIKE clauses" + severity: high # critical, high, medium, low, info + languages: [go] + tags: [security, injection, owasp-a03] + pattern: 'LIKE\s+\?\s*,\s*["\x60]%\s*\+' + exclude_pattern: 'EscapeLike' # suppress if this also matches + fix: "Use parameterised LIKE with explicit escaping of % and _ characters" + found_in: [go-store] # repos where first discovered + example_bad: | + db.Where("name LIKE ?", "%"+input+"%") + example_good: | + db.Where("name LIKE ?", EscapeLike(input)) + first_seen: "2026-03-09" + detection: regex # future: ast, semantic + auto_fixable: false # future: true when we add codemods +``` + +### Rule Struct (Go) + +```go +type Rule struct { + ID string `yaml:"id"` + Title string `yaml:"title"` + Severity string `yaml:"severity"` + 
Languages []string `yaml:"languages"` + Tags []string `yaml:"tags"` + Pattern string `yaml:"pattern"` + ExcludePattern string `yaml:"exclude_pattern,omitempty"` + Fix string `yaml:"fix"` + FoundIn []string `yaml:"found_in,omitempty"` + ExampleBad string `yaml:"example_bad,omitempty"` + ExampleGood string `yaml:"example_good,omitempty"` + FirstSeen string `yaml:"first_seen"` + Detection string `yaml:"detection"` // regex | ast | semantic + AutoFixable bool `yaml:"auto_fixable"` +} +``` + +### Finding Struct (Go) + +Designed to align with go-ai's `ScanAlert` shape and `~/.core/ai/metrics/` JSONL format: + +```go +type Finding struct { + RuleID string `json:"rule_id"` + Title string `json:"title"` + Severity string `json:"severity"` + File string `json:"file"` + Line int `json:"line"` + Match string `json:"match"` // matched text + Fix string `json:"fix"` + Repo string `json:"repo,omitempty"` +} +``` + +### CLI Interface + +```bash +# Check current directory against all catalogs for detected languages +core-lint check ./... + +# Check specific languages/catalogs +core-lint check --lang go --catalog go-security ./pkg/... + +# Output as JSON (for piping to other tools) +core-lint check --format json ./... 
+ +# List available rules +core-lint catalog list +core-lint catalog list --lang go --severity high + +# Show a specific rule with examples +core-lint catalog show go-sec-001 +``` + +## Layer 2: `go-ai` Lint MCP Subsystem + +New subsystem registered alongside files/rag/ml/brain: + +```go +type LintSubsystem struct { + catalog *lint.Catalog + root string // workspace root for scanning +} + +func (s *LintSubsystem) Name() string { return "lint" } + +func (s *LintSubsystem) RegisterTools(server *mcp.Server) { + // lint_check - run rules against workspace files + // lint_catalog - list/search available rules + // lint_report - get findings summary for a path +} +``` + +### MCP Tools + +| Tool | Input | Output | Group | +|------|-------|--------|-------| +| `lint_check` | `{path: string, lang?: string, severity?: string}` | `{findings: []Finding}` | lint | +| `lint_catalog` | `{lang?: string, tags?: []string, severity?: string}` | `{rules: []Rule}` | lint | +| `lint_report` | `{path: string, format?: "summary" or "detailed"}` | `{summary: ReportSummary}` | lint | + +This means any MCP-connected agent (Claude, LEM, Codex) can call `lint_check` to scan code against the catalog. + +## Layer 3: `core/agent` Polish Skill + +Claude Code plugin at `core/agent/claude/polish/`: + +``` +core/agent/claude/polish/ +├── plugin.json +├── hooks.json # optional: PostToolUse after git commit +├── commands/ +│ └── polish.md # /polish slash command +└── scripts/ + └── run-lint.sh # shells out to core-lint +``` + +### `/polish` Command Flow + +1. Run `core-lint check ./...` for fast deterministic findings +2. Report findings to user +3. Optionally run AI reviewers (Gemini CLI, Codex) for deeper analysis +4. Deduplicate AI findings against catalog (already-known patterns) +5. 
Propose new patterns as catalog additions (PR to core/lint) + +### Subagent Configuration (`.core/agents/`) + +Repos can configure polish behaviour: + +```yaml +# any-repo/.core/agents/polish.yaml +languages: [go] +catalogs: [go-security, go-modernise, go-correctness] +reviewers: [gemini] # which AI tools to invoke +exclude: [vendor/, testdata/, *_test.go] +severity_threshold: medium # only report medium+ findings +``` + +## Findings to LEM Pipeline + +``` +core-lint check -> findings.json + | + v +~/.core/ai/metrics/YYYY-MM-DD.jsonl (audit trail) + | + v +LEM training data: + - Rule examples (bad/good pairs) -> supervised training signal + - Finding frequency -> pattern importance weighting + - Rule descriptions -> natural language understanding of "why" + | + v +LEM tool: "does this code comply with CoreGo standards?" + -> queries lint_catalog via MCP + -> applies learned pattern recognition + -> reports violations with rule IDs and fixes +``` + +## Initial Catalog Seed + +From the March 2026 ecosystem sweep: + +| ID | Title | Severity | Language | Found In | +|----|-------|----------|----------|----------| +| go-sec-001 | SQL wildcard injection | high | go | go-store | +| go-sec-002 | Path traversal in cache keys | high | go | go-cache | +| go-sec-003 | XSS in HTML output | high | go | go-html | +| go-sec-004 | Non-constant-time auth comparison | high | go | go-crypt | +| go-sec-005 | Log injection via unescaped input | medium | go | go-log | +| go-sec-006 | Key material in log output | high | go | go-log | +| go-cor-001 | Goroutine leak (no WaitGroup) | high | go | core/go | +| go-cor-002 | Shutdown deadlock (wg.Wait no timeout) | high | go | core/go | +| go-cor-003 | Silent error swallowing | medium | go | go-process, go-ratelimit | +| go-cor-004 | Panic in library code | medium | go | go-i18n | +| go-cor-005 | Delete without path validation | high | go | go-io | +| go-mod-001 | Manual slice clone (append nil pattern) | low | go | core/go | +| go-mod-002 | 
Manual sort instead of slices.Sorted | low | go | core/go | +| go-mod-003 | Manual reverse loop instead of slices.Backward | low | go | core/go | +| go-mod-004 | sync.WaitGroup Add+Done instead of Go() | low | go | core/go | +| go-mod-005 | Manual map key collection instead of maps.Keys | low | go | core/go | +| go-cor-006 | Missing error return from API calls | medium | go | go-forge, go-git | +| go-cor-007 | Signal handler uses wrong type | medium | go | go-process | + +## Dependencies + +``` +core/lint (standalone, zero core deps) + ^ + | +go-ai/mcp/lint/ (imports core/lint for catalog + matcher) + ^ + | +core/agent/claude/polish/ (shells out to core-lint CLI) +``` + +`core/lint` has no dependency on `core/go` or any other framework module. It is a standalone library + CLI, like `core/go-io`. + +## Future Extensions (Not Built Now) + +- **AST-based detection** (layer 2): Parse Go/PHP AST, match structural patterns +- **Semantic detection** (layer 3): LEM judges code against rule descriptions +- **Auto-fix codemods**: `core-lint fix` applies known fixes automatically +- **CI integration**: GitHub Actions workflow runs `core-lint check` on PRs +- **CodeRabbit integration**: Import CodeRabbit findings as catalog entries +- **Cross-repo dashboard**: Aggregate findings across all repos in workspace diff --git a/docs/plans/2026-03-09-lint-pattern-catalog-plan.md b/docs/plans/2026-03-09-lint-pattern-catalog-plan.md new file mode 100644 index 0000000..7f1ddec --- /dev/null +++ b/docs/plans/2026-03-09-lint-pattern-catalog-plan.md @@ -0,0 +1,1668 @@ +# Lint Pattern Catalog Implementation Plan + +> **Fully implemented (14 Mar 2026).** All tasks in this plan are complete. The `core/lint` module ships 18 rules across 3 catalogs, with a working CLI and embedded YAML. This plan is retained alongside the design doc, which tracks the remaining MCP and polish skill layers. 
+ +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Build `core/lint` — a standalone Go library + CLI that loads YAML pattern catalogs and runs regex-based code checks, seeded with 18 patterns from the March 2026 ecosystem sweep. + +**Architecture:** Standalone Go module (`forge.lthn.ai/core/lint`) with zero framework deps. YAML catalog files define rules (id, severity, regex pattern, fix). `pkg/lint` loads catalogs and matches patterns against files. `cmd/core-lint` is a Cobra CLI. Uses `cli.Main()` + `cli.WithCommands()` from `core/cli`. + +**Tech Stack:** Go 1.26, `gopkg.in/yaml.v3` (YAML parsing), `forge.lthn.ai/core/cli` (CLI framework), `github.com/stretchr/testify` (testing), `embed` (catalog embedding). + +--- + +### Task 1: Create repo and Go module + +**Files:** +- Create: `/Users/snider/Code/core/lint/go.mod` +- Create: `/Users/snider/Code/core/lint/.core/build.yaml` +- Create: `/Users/snider/Code/core/lint/CLAUDE.md` + +**Step 1: Create repo on forge** + +```bash +ssh -p 2223 git@forge.lthn.ai +``` + +If SSH repo creation isn't available, create via Forgejo API: +```bash +curl -X POST "https://forge.lthn.ai/api/v1/orgs/core/repos" \ + -H "Authorization: token $FORGE_TOKEN" \ + -H "Content-Type: application/json" \ + -d '{"name":"lint","description":"Pattern catalog & regex matcher for code quality","auto_init":true,"default_branch":"main"}' +``` + +Or manually create on forge.lthn.ai web UI under the `core` org. 
+ +**Step 2: Clone and initialise Go module** + +```bash +cd ~/Code/core +git clone ssh://git@forge.lthn.ai:2223/core/lint.git +cd lint +go mod init forge.lthn.ai/core/lint +``` + +Set Go version in go.mod: +``` +module forge.lthn.ai/core/lint + +go 1.26.0 +``` + +**Step 3: Create `.core/build.yaml`** + +```yaml +version: 1 + +project: + name: core-lint + description: Pattern catalog and regex code checker + main: ./cmd/core-lint + binary: core-lint + +build: + cgo: false + flags: + - -trimpath + ldflags: + - -s + - -w + +targets: + - os: linux + arch: amd64 + - os: linux + arch: arm64 + - os: darwin + arch: arm64 + - os: windows + arch: amd64 +``` + +**Step 4: Create `CLAUDE.md`** + +```markdown +# CLAUDE.md + +## Project Overview + +`core/lint` is a standalone pattern catalog and regex-based code checker. It loads YAML rule definitions and matches them against source files. Zero framework dependencies. + +## Build & Development + +```bash +core go test +core go qa +core build # produces ./bin/core-lint +``` + +## Architecture + +- `catalog/` — YAML rule files (embedded at compile time) +- `pkg/lint/` — Library: Rule, Catalog, Matcher, Report types +- `cmd/core-lint/` — CLI binary using `cli.Main()` + +## Rule Schema + +Each YAML file contains an array of rules with: id, title, severity, languages, tags, pattern (regex), exclude_pattern, fix, example_bad, example_good, detection type. + +## Coding Standards + +- UK English +- `declare(strict_types=1)` equivalent: all functions have typed params/returns +- Tests use testify +- License: EUPL-1.2 +``` + +**Step 5: Add to go.work** + +Add `./core/lint` to `~/Code/go.work` under the Core framework section. 
+ +**Step 6: Commit** + +```bash +git add go.mod .core/ CLAUDE.md +git commit -m "feat: initialise core/lint module" +``` + +--- + +### Task 2: Rule struct and YAML parsing + +**Files:** +- Create: `/Users/snider/Code/core/lint/pkg/lint/rule.go` +- Create: `/Users/snider/Code/core/lint/pkg/lint/rule_test.go` + +**Step 1: Write the failing test** + +```go +package lint + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestParseRules(t *testing.T) { + yaml := ` +- id: test-001 + title: "Test rule" + severity: high + languages: [go] + tags: [security] + pattern: 'fmt\.Println' + fix: "Use structured logging" + detection: regex +` + rules, err := ParseRules([]byte(yaml)) + require.NoError(t, err) + require.Len(t, rules, 1) + assert.Equal(t, "test-001", rules[0].ID) + assert.Equal(t, "high", rules[0].Severity) + assert.Equal(t, []string{"go"}, rules[0].Languages) + assert.Equal(t, `fmt\.Println`, rules[0].Pattern) +} + +func TestParseRules_Invalid(t *testing.T) { + _, err := ParseRules([]byte("not: valid: yaml: [")) + assert.Error(t, err) +} + +func TestRule_Validate(t *testing.T) { + good := Rule{ID: "x-001", Title: "T", Severity: "high", Languages: []string{"go"}, Pattern: "foo", Detection: "regex"} + assert.NoError(t, good.Validate()) + + bad := Rule{} // missing required fields + assert.Error(t, bad.Validate()) +} + +func TestRule_Validate_BadRegex(t *testing.T) { + r := Rule{ID: "x-001", Title: "T", Severity: "high", Languages: []string{"go"}, Pattern: "[invalid", Detection: "regex"} + assert.Error(t, r.Validate()) +} +``` + +**Step 2: Run test to verify it fails** + +Run: `cd ~/Code/core/lint && go test ./pkg/lint/ -v` +Expected: FAIL — `ParseRules` and `Rule` not defined + +**Step 3: Write minimal implementation** + +```go +package lint + +import ( + "fmt" + "regexp" + + "gopkg.in/yaml.v3" +) + +// Rule defines a single lint pattern check. 
+type Rule struct { + ID string `yaml:"id" json:"id"` + Title string `yaml:"title" json:"title"` + Severity string `yaml:"severity" json:"severity"` + Languages []string `yaml:"languages" json:"languages"` + Tags []string `yaml:"tags" json:"tags"` + Pattern string `yaml:"pattern" json:"pattern"` + ExcludePattern string `yaml:"exclude_pattern" json:"exclude_pattern,omitempty"` + Fix string `yaml:"fix" json:"fix"` + FoundIn []string `yaml:"found_in" json:"found_in,omitempty"` + ExampleBad string `yaml:"example_bad" json:"example_bad,omitempty"` + ExampleGood string `yaml:"example_good" json:"example_good,omitempty"` + FirstSeen string `yaml:"first_seen" json:"first_seen,omitempty"` + Detection string `yaml:"detection" json:"detection"` + AutoFixable bool `yaml:"auto_fixable" json:"auto_fixable"` +} + +// Validate checks that a Rule has all required fields and a compilable regex pattern. +func (r *Rule) Validate() error { + if r.ID == "" { + return fmt.Errorf("rule missing id") + } + if r.Title == "" { + return fmt.Errorf("rule %s: missing title", r.ID) + } + if r.Severity == "" { + return fmt.Errorf("rule %s: missing severity", r.ID) + } + if len(r.Languages) == 0 { + return fmt.Errorf("rule %s: missing languages", r.ID) + } + if r.Pattern == "" { + return fmt.Errorf("rule %s: missing pattern", r.ID) + } + if r.Detection == "regex" { + if _, err := regexp.Compile(r.Pattern); err != nil { + return fmt.Errorf("rule %s: invalid regex: %w", r.ID, err) + } + } + return nil +} + +// ParseRules parses YAML bytes into a slice of Rules. 
+func ParseRules(data []byte) ([]Rule, error) { + var rules []Rule + if err := yaml.Unmarshal(data, &rules); err != nil { + return nil, fmt.Errorf("parse rules: %w", err) + } + return rules, nil +} +``` + +**Step 4: Run test to verify it passes** + +Run: `cd ~/Code/core/lint && go test ./pkg/lint/ -v` +Expected: PASS (4 tests) + +**Step 5: Add yaml dependency** + +```bash +cd ~/Code/core/lint && go get gopkg.in/yaml.v3 && go get github.com/stretchr/testify +``` + +**Step 6: Commit** + +```bash +git add pkg/lint/rule.go pkg/lint/rule_test.go go.mod go.sum +git commit -m "feat: add Rule struct with YAML parsing and validation" +``` + +--- + +### Task 3: Catalog loader with embed support + +**Files:** +- Create: `/Users/snider/Code/core/lint/pkg/lint/catalog.go` +- Create: `/Users/snider/Code/core/lint/pkg/lint/catalog_test.go` +- Create: `/Users/snider/Code/core/lint/catalog/go-security.yaml` (minimal test file) + +**Step 1: Create a minimal test catalog file** + +Create `/Users/snider/Code/core/lint/catalog/go-security.yaml`: +```yaml +- id: go-sec-001 + title: "SQL wildcard injection in LIKE clauses" + severity: high + languages: [go] + tags: [security, injection] + pattern: 'LIKE\s+\?\s*,\s*["%].*\+' + fix: "Use parameterised LIKE with EscapeLike()" + found_in: [go-store] + first_seen: "2026-03-09" + detection: regex +``` + +**Step 2: Write the failing test** + +```go +package lint + +import ( + "embed" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestCatalog_LoadDir(t *testing.T) { + // Find the catalog/ dir relative to the module root + dir := filepath.Join("..", "..", "catalog") + cat, err := LoadDir(dir) + require.NoError(t, err) + assert.Greater(t, len(cat.Rules), 0) + assert.Equal(t, "go-sec-001", cat.Rules[0].ID) +} + +func TestCatalog_LoadDir_NotExist(t *testing.T) { + _, err := LoadDir("/nonexistent") + assert.Error(t, err) +} + +func TestCatalog_Filter_Language(t 
*testing.T) { + cat := &Catalog{Rules: []Rule{ + {ID: "go-001", Languages: []string{"go"}, Severity: "high"}, + {ID: "php-001", Languages: []string{"php"}, Severity: "high"}, + }} + filtered := cat.ForLanguage("go") + assert.Len(t, filtered, 1) + assert.Equal(t, "go-001", filtered[0].ID) +} + +func TestCatalog_Filter_Severity(t *testing.T) { + cat := &Catalog{Rules: []Rule{ + {ID: "a", Severity: "high"}, + {ID: "b", Severity: "low"}, + {ID: "c", Severity: "medium"}, + }} + filtered := cat.AtSeverity("medium") + assert.Len(t, filtered, 2) // high + medium +} + +func TestCatalog_LoadFS(t *testing.T) { + // Write temp yaml + dir := t.TempDir() + data := []byte(`- id: fs-001 + title: "FS test" + severity: low + languages: [go] + tags: [] + pattern: 'test' + fix: "fix" + detection: regex +`) + require.NoError(t, os.WriteFile(filepath.Join(dir, "test.yaml"), data, 0644)) + + cat, err := LoadDir(dir) + require.NoError(t, err) + assert.Len(t, cat.Rules, 1) +} +``` + +**Step 3: Write minimal implementation** + +```go +package lint + +import ( + "embed" + "fmt" + "io/fs" + "os" + "path/filepath" + "slices" + "strings" +) + +// Catalog holds a collection of lint rules loaded from YAML files. +type Catalog struct { + Rules []Rule +} + +// severityOrder maps severity names to numeric priority (higher = more severe). +var severityOrder = map[string]int{ + "critical": 5, + "high": 4, + "medium": 3, + "low": 2, + "info": 1, +} + +// LoadDir loads all .yaml files from a directory path into a Catalog. 
+func LoadDir(dir string) (*Catalog, error) { + entries, err := os.ReadDir(dir) + if err != nil { + return nil, fmt.Errorf("load catalog dir: %w", err) + } + + cat := &Catalog{} + for _, entry := range entries { + if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".yaml") { + continue + } + data, err := os.ReadFile(filepath.Join(dir, entry.Name())) + if err != nil { + return nil, fmt.Errorf("read %s: %w", entry.Name(), err) + } + rules, err := ParseRules(data) + if err != nil { + return nil, fmt.Errorf("parse %s: %w", entry.Name(), err) + } + cat.Rules = append(cat.Rules, rules...) + } + return cat, nil +} + +// LoadFS loads all .yaml files from an embed.FS into a Catalog. +func LoadFS(fsys embed.FS, dir string) (*Catalog, error) { + cat := &Catalog{} + err := fs.WalkDir(fsys, dir, func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + if d.IsDir() || !strings.HasSuffix(path, ".yaml") { + return nil + } + data, err := fsys.ReadFile(path) + if err != nil { + return fmt.Errorf("read %s: %w", path, err) + } + rules, err := ParseRules(data) + if err != nil { + return fmt.Errorf("parse %s: %w", path, err) + } + cat.Rules = append(cat.Rules, rules...) + return nil + }) + if err != nil { + return nil, err + } + return cat, nil +} + +// ForLanguage returns rules that apply to the given language. +func (c *Catalog) ForLanguage(lang string) []Rule { + var out []Rule + for _, r := range c.Rules { + if slices.Contains(r.Languages, lang) { + out = append(out, r) + } + } + return out +} + +// AtSeverity returns rules at or above the given severity threshold. +func (c *Catalog) AtSeverity(threshold string) []Rule { + minLevel := severityOrder[threshold] + var out []Rule + for _, r := range c.Rules { + if severityOrder[r.Severity] >= minLevel { + out = append(out, r) + } + } + return out +} + +// ByID returns a rule by its ID, or nil if not found. 
+func (c *Catalog) ByID(id string) *Rule { + for i := range c.Rules { + if c.Rules[i].ID == id { + return &c.Rules[i] + } + } + return nil +} +``` + +**Step 4: Run test to verify it passes** + +Run: `cd ~/Code/core/lint && go test ./pkg/lint/ -v` +Expected: PASS (all tests) + +**Step 5: Commit** + +```bash +git add pkg/lint/catalog.go pkg/lint/catalog_test.go catalog/go-security.yaml +git commit -m "feat: add Catalog loader with dir/embed/filter support" +``` + +--- + +### Task 4: Regex matcher + +**Files:** +- Create: `/Users/snider/Code/core/lint/pkg/lint/matcher.go` +- Create: `/Users/snider/Code/core/lint/pkg/lint/matcher_test.go` + +**Step 1: Write the failing test** + +```go +package lint + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestMatcher_Match(t *testing.T) { + rules := []Rule{ + { + ID: "test-001", + Title: "fmt.Println usage", + Severity: "low", + Languages: []string{"go"}, + Pattern: `fmt\.Println`, + Fix: "Use structured logging", + Detection: "regex", + }, + } + m, err := NewMatcher(rules) + require.NoError(t, err) + + content := `package main + +import "fmt" + +func main() { + fmt.Println("hello") +} +` + findings := m.Match("main.go", []byte(content)) + require.Len(t, findings, 1) + assert.Equal(t, "test-001", findings[0].RuleID) + assert.Equal(t, "main.go", findings[0].File) + assert.Equal(t, 6, findings[0].Line) + assert.Contains(t, findings[0].Match, "fmt.Println") +} + +func TestMatcher_ExcludePattern(t *testing.T) { + rules := []Rule{ + { + ID: "test-002", + Title: "Println with exclude", + Severity: "low", + Languages: []string{"go"}, + Pattern: `fmt\.Println`, + ExcludePattern: `// lint:ignore`, + Fix: "Use logging", + Detection: "regex", + }, + } + m, err := NewMatcher(rules) + require.NoError(t, err) + + content := `package main +func a() { fmt.Println("bad") } +func b() { fmt.Println("ok") // lint:ignore } +` + findings := m.Match("main.go", []byte(content)) + // 
Line 2 matches, line 3 is excluded + assert.Len(t, findings, 1) + assert.Equal(t, 2, findings[0].Line) +} + +func TestMatcher_NoMatch(t *testing.T) { + rules := []Rule{ + {ID: "test-003", Title: "T", Severity: "low", Languages: []string{"go"}, Pattern: `NEVER_MATCH_THIS`, Detection: "regex"}, + } + m, err := NewMatcher(rules) + require.NoError(t, err) + + findings := m.Match("main.go", []byte("package main\n")) + assert.Empty(t, findings) +} + +func TestMatcher_InvalidRegex(t *testing.T) { + rules := []Rule{ + {ID: "bad", Title: "T", Severity: "low", Languages: []string{"go"}, Pattern: `[invalid`, Detection: "regex"}, + } + _, err := NewMatcher(rules) + assert.Error(t, err) +} +``` + +**Step 2: Run test to verify it fails** + +Run: `cd ~/Code/core/lint && go test ./pkg/lint/ -v -run TestMatcher` +Expected: FAIL — `NewMatcher` not defined + +**Step 3: Write minimal implementation** + +```go +package lint + +import ( + "fmt" + "regexp" + "strings" +) + +// Finding represents a single match of a rule against source code. +type Finding struct { + RuleID string `json:"rule_id"` + Title string `json:"title"` + Severity string `json:"severity"` + File string `json:"file"` + Line int `json:"line"` + Match string `json:"match"` + Fix string `json:"fix"` + Repo string `json:"repo,omitempty"` +} + +// compiledRule is a rule with its regex pre-compiled. +type compiledRule struct { + rule Rule + pattern *regexp.Regexp + exclude *regexp.Regexp +} + +// Matcher runs compiled rules against file contents. +type Matcher struct { + rules []compiledRule +} + +// NewMatcher compiles all rule patterns and returns a Matcher. 
+func NewMatcher(rules []Rule) (*Matcher, error) { + compiled := make([]compiledRule, 0, len(rules)) + for _, r := range rules { + if r.Detection != "regex" { + continue // skip non-regex rules + } + p, err := regexp.Compile(r.Pattern) + if err != nil { + return nil, fmt.Errorf("rule %s: invalid pattern: %w", r.ID, err) + } + cr := compiledRule{rule: r, pattern: p} + if r.ExcludePattern != "" { + ex, err := regexp.Compile(r.ExcludePattern) + if err != nil { + return nil, fmt.Errorf("rule %s: invalid exclude_pattern: %w", r.ID, err) + } + cr.exclude = ex + } + compiled = append(compiled, cr) + } + return &Matcher{rules: compiled}, nil +} + +// Match checks file contents against all rules and returns findings. +func (m *Matcher) Match(filename string, content []byte) []Finding { + lines := strings.Split(string(content), "\n") + var findings []Finding + + for _, cr := range m.rules { + for i, line := range lines { + if !cr.pattern.MatchString(line) { + continue + } + if cr.exclude != nil && cr.exclude.MatchString(line) { + continue + } + findings = append(findings, Finding{ + RuleID: cr.rule.ID, + Title: cr.rule.Title, + Severity: cr.rule.Severity, + File: filename, + Line: i + 1, + Match: strings.TrimSpace(line), + Fix: cr.rule.Fix, + }) + } + } + return findings +} +``` + +**Step 4: Run test to verify it passes** + +Run: `cd ~/Code/core/lint && go test ./pkg/lint/ -v -run TestMatcher` +Expected: PASS (4 tests) + +**Step 5: Commit** + +```bash +git add pkg/lint/matcher.go pkg/lint/matcher_test.go +git commit -m "feat: add regex Matcher with exclude pattern support" +``` + +--- + +### Task 5: Report output (JSON, text, JSONL) + +**Files:** +- Create: `/Users/snider/Code/core/lint/pkg/lint/report.go` +- Create: `/Users/snider/Code/core/lint/pkg/lint/report_test.go` + +**Step 1: Write the failing test** + +```go +package lint + +import ( + "bytes" + "encoding/json" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" 
+) + +func TestReport_JSON(t *testing.T) { + findings := []Finding{ + {RuleID: "x-001", Title: "Test", Severity: "high", File: "a.go", Line: 10, Match: "bad code", Fix: "fix it"}, + } + var buf bytes.Buffer + require.NoError(t, WriteJSON(&buf, findings)) + + var parsed []Finding + require.NoError(t, json.Unmarshal(buf.Bytes(), &parsed)) + assert.Len(t, parsed, 1) + assert.Equal(t, "x-001", parsed[0].RuleID) +} + +func TestReport_JSONL(t *testing.T) { + findings := []Finding{ + {RuleID: "a-001", File: "a.go", Line: 1}, + {RuleID: "b-001", File: "b.go", Line: 2}, + } + var buf bytes.Buffer + require.NoError(t, WriteJSONL(&buf, findings)) + + lines := strings.Split(strings.TrimSpace(buf.String()), "\n") + assert.Len(t, lines, 2) +} + +func TestReport_Text(t *testing.T) { + findings := []Finding{ + {RuleID: "x-001", Title: "Test rule", Severity: "high", File: "main.go", Line: 42, Match: "bad()", Fix: "use good()"}, + } + var buf bytes.Buffer + WriteText(&buf, findings) + + out := buf.String() + assert.Contains(t, out, "main.go:42") + assert.Contains(t, out, "x-001") + assert.Contains(t, out, "high") +} + +func TestReport_Summary(t *testing.T) { + findings := []Finding{ + {Severity: "high"}, + {Severity: "high"}, + {Severity: "low"}, + } + s := Summarise(findings) + assert.Equal(t, 3, s.Total) + assert.Equal(t, 2, s.BySeverity["high"]) + assert.Equal(t, 1, s.BySeverity["low"]) +} +``` + +**Step 2: Run test to verify it fails** + +Run: `cd ~/Code/core/lint && go test ./pkg/lint/ -v -run TestReport` +Expected: FAIL — functions not defined + +**Step 3: Write minimal implementation** + +```go +package lint + +import ( + "encoding/json" + "fmt" + "io" +) + +// Summary holds aggregate stats about findings. +type Summary struct { + Total int `json:"total"` + BySeverity map[string]int `json:"by_severity"` +} + +// Summarise creates a Summary from a list of findings. 
+func Summarise(findings []Finding) Summary { + s := Summary{ + Total: len(findings), + BySeverity: make(map[string]int), + } + for _, f := range findings { + s.BySeverity[f.Severity]++ + } + return s +} + +// WriteJSON writes findings as a JSON array. +func WriteJSON(w io.Writer, findings []Finding) error { + enc := json.NewEncoder(w) + enc.SetIndent("", " ") + return enc.Encode(findings) +} + +// WriteJSONL writes findings as newline-delimited JSON (one object per line). +// Compatible with ~/.core/ai/metrics/ format. +func WriteJSONL(w io.Writer, findings []Finding) error { + enc := json.NewEncoder(w) + for _, f := range findings { + if err := enc.Encode(f); err != nil { + return err + } + } + return nil +} + +// WriteText writes findings as human-readable text. +func WriteText(w io.Writer, findings []Finding) { + for _, f := range findings { + fmt.Fprintf(w, "%s:%d [%s] %s (%s)\n", f.File, f.Line, f.Severity, f.Title, f.RuleID) + if f.Fix != "" { + fmt.Fprintf(w, " fix: %s\n", f.Fix) + } + } +} +``` + +**Step 4: Run test to verify it passes** + +Run: `cd ~/Code/core/lint && go test ./pkg/lint/ -v -run TestReport` +Expected: PASS (4 tests) + +**Step 5: Commit** + +```bash +git add pkg/lint/report.go pkg/lint/report_test.go +git commit -m "feat: add report output (JSON, JSONL, text, summary)" +``` + +--- + +### Task 6: Scanner (walk files + match) + +**Files:** +- Create: `/Users/snider/Code/core/lint/pkg/lint/scanner.go` +- Create: `/Users/snider/Code/core/lint/pkg/lint/scanner_test.go` + +**Step 1: Write the failing test** + +```go +package lint + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestScanner_ScanDir(t *testing.T) { + // Set up temp dir with a .go file containing a known pattern + dir := t.TempDir() + goFile := filepath.Join(dir, "main.go") + require.NoError(t, os.WriteFile(goFile, []byte(`package main + +import "fmt" + +func main() { + fmt.Println("hello") 
+} +`), 0644)) + + rules := []Rule{ + {ID: "test-001", Title: "Println", Severity: "low", Languages: []string{"go"}, Pattern: `fmt\.Println`, Fix: "log", Detection: "regex"}, + } + + s, err := NewScanner(rules) + require.NoError(t, err) + + findings, err := s.ScanDir(dir) + require.NoError(t, err) + require.Len(t, findings, 1) + assert.Equal(t, "test-001", findings[0].RuleID) +} + +func TestScanner_ScanDir_ExcludesVendor(t *testing.T) { + dir := t.TempDir() + vendor := filepath.Join(dir, "vendor") + require.NoError(t, os.MkdirAll(vendor, 0755)) + require.NoError(t, os.WriteFile(filepath.Join(vendor, "lib.go"), []byte("package lib\nfunc x() { fmt.Println() }\n"), 0644)) + + rules := []Rule{ + {ID: "test-001", Title: "Println", Severity: "low", Languages: []string{"go"}, Pattern: `fmt\.Println`, Fix: "log", Detection: "regex"}, + } + + s, err := NewScanner(rules) + require.NoError(t, err) + + findings, err := s.ScanDir(dir) + require.NoError(t, err) + assert.Empty(t, findings) +} + +func TestScanner_LanguageDetection(t *testing.T) { + assert.Equal(t, "go", DetectLanguage("main.go")) + assert.Equal(t, "php", DetectLanguage("app.php")) + assert.Equal(t, "ts", DetectLanguage("index.ts")) + assert.Equal(t, "ts", DetectLanguage("index.tsx")) + assert.Equal(t, "cpp", DetectLanguage("engine.cpp")) + assert.Equal(t, "cpp", DetectLanguage("engine.cc")) + assert.Equal(t, "", DetectLanguage("README.md")) +} +``` + +**Step 2: Run test to verify it fails** + +Run: `cd ~/Code/core/lint && go test ./pkg/lint/ -v -run TestScanner` +Expected: FAIL — `NewScanner` not defined + +**Step 3: Write minimal implementation** + +```go +package lint + +import ( + "fmt" + "os" + "path/filepath" + "strings" +) + +// defaultExcludes are directories skipped during scanning. +var defaultExcludes = []string{"vendor", "node_modules", ".git", "testdata", ".core"} + +// extToLang maps file extensions to language identifiers. 
+var extToLang = map[string]string{ + ".go": "go", + ".php": "php", + ".ts": "ts", + ".tsx": "ts", + ".js": "js", + ".jsx": "js", + ".cpp": "cpp", + ".cc": "cpp", + ".cxx": "cpp", + ".c": "cpp", + ".h": "cpp", + ".hpp": "cpp", +} + +// DetectLanguage returns the language identifier for a filename, or "" if unknown. +func DetectLanguage(filename string) string { + ext := filepath.Ext(filename) + return extToLang[ext] +} + +// Scanner walks directories and matches files against rules. +type Scanner struct { + matcher *Matcher + rules []Rule + excludes []string +} + +// NewScanner creates a Scanner from a set of rules. +func NewScanner(rules []Rule) (*Scanner, error) { + m, err := NewMatcher(rules) + if err != nil { + return nil, err + } + return &Scanner{ + matcher: m, + rules: rules, + excludes: defaultExcludes, + }, nil +} + +// ScanDir walks a directory tree and returns all findings. +func (s *Scanner) ScanDir(root string) ([]Finding, error) { + var all []Finding + + err := filepath.WalkDir(root, func(path string, d os.DirEntry, err error) error { + if err != nil { + return err + } + + // Skip excluded directories + if d.IsDir() { + for _, ex := range s.excludes { + if d.Name() == ex { + return filepath.SkipDir + } + } + return nil + } + + // Only scan files with known language extensions + lang := DetectLanguage(path) + if lang == "" { + return nil + } + + content, err := os.ReadFile(path) + if err != nil { + return fmt.Errorf("read %s: %w", path, err) + } + + // Make path relative to root for cleaner output + rel, err := filepath.Rel(root, path) + if err != nil { + rel = path + } + + findings := s.matcher.Match(rel, content) + all = append(all, findings...) + return nil + }) + + return all, err +} + +// ScanFile scans a single file and returns findings. 
+func (s *Scanner) ScanFile(path string) ([]Finding, error) { + content, err := os.ReadFile(path) + if err != nil { + return nil, fmt.Errorf("read %s: %w", path, err) + } + return s.matcher.Match(path, content), nil +} +``` + +**Step 4: Run test to verify it passes** + +Run: `cd ~/Code/core/lint && go test ./pkg/lint/ -v -run TestScanner` +Expected: PASS (3 tests) + +**Step 5: Commit** + +```bash +git add pkg/lint/scanner.go pkg/lint/scanner_test.go +git commit -m "feat: add Scanner with directory walking and language detection" +``` + +--- + +### Task 7: Seed the catalog YAML files + +**Files:** +- Create: `/Users/snider/Code/core/lint/catalog/go-security.yaml` (expand from task 3) +- Create: `/Users/snider/Code/core/lint/catalog/go-correctness.yaml` +- Create: `/Users/snider/Code/core/lint/catalog/go-modernise.yaml` + +**Step 1: Write `catalog/go-security.yaml`** + +```yaml +- id: go-sec-001 + title: "SQL wildcard injection in LIKE clauses" + severity: high + languages: [go] + tags: [security, injection, owasp-a03] + pattern: 'LIKE\s+\?.*["%`]\s*\%.*\+' + exclude_pattern: 'EscapeLike' + fix: "Use parameterised LIKE with explicit escaping of % and _ characters" + found_in: [go-store] + example_bad: | + db.Where("name LIKE ?", "%"+input+"%") + example_good: | + db.Where("name LIKE ?", EscapeLike(input)) + first_seen: "2026-03-09" + detection: regex + +- id: go-sec-002 + title: "Path traversal in file/cache key operations" + severity: high + languages: [go] + tags: [security, path-traversal, owasp-a01] + pattern: 'filepath\.Join\(.*,\s*\w+\)' + exclude_pattern: 'filepath\.Clean|securejoin|ValidatePath' + fix: "Validate path components do not contain .. 
before joining" + found_in: [go-cache] + example_bad: | + path := filepath.Join(cacheDir, userInput) + example_good: | + if strings.Contains(key, "..") { return ErrInvalidKey } + path := filepath.Join(cacheDir, key) + first_seen: "2026-03-09" + detection: regex + +- id: go-sec-003 + title: "XSS via unescaped HTML output" + severity: high + languages: [go] + tags: [security, xss, owasp-a03] + pattern: 'fmt\.Sprintf\(.*<.*>.*%s' + exclude_pattern: 'html\.EscapeString|template\.HTMLEscapeString' + fix: "Use html.EscapeString() for user-supplied values in HTML output" + found_in: [go-html] + example_bad: | + out := fmt.Sprintf("
<div>%s</div>
", userInput) + example_good: | + out := fmt.Sprintf("
<div>%s</div>
", html.EscapeString(userInput)) + first_seen: "2026-03-09" + detection: regex + +- id: go-sec-004 + title: "Non-constant-time comparison for authentication" + severity: high + languages: [go] + tags: [security, timing-attack, owasp-a02] + pattern: '==\s*\w*(token|key|secret|password|hash|digest|hmac|mac|sig)' + exclude_pattern: 'subtle\.ConstantTimeCompare|hmac\.Equal' + fix: "Use crypto/subtle.ConstantTimeCompare for security-sensitive comparisons" + found_in: [go-crypt] + example_bad: | + if providedToken == storedToken { + example_good: | + if subtle.ConstantTimeCompare([]byte(provided), []byte(stored)) == 1 { + first_seen: "2026-03-09" + detection: regex + +- id: go-sec-005 + title: "Log injection via unescaped newlines" + severity: medium + languages: [go] + tags: [security, injection, logging] + pattern: 'log\.\w+\(.*\+.*\)' + exclude_pattern: 'strings\.ReplaceAll.*\\n|slog\.' + fix: "Use structured logging (slog) or sanitise newlines from user input" + found_in: [go-log] + example_bad: | + log.Printf("user login: " + username) + example_good: | + slog.Info("user login", "username", username) + first_seen: "2026-03-09" + detection: regex + +- id: go-sec-006 + title: "Sensitive key material in log output" + severity: high + languages: [go] + tags: [security, secrets, logging] + pattern: 'log\.\w+\(.*(?i)(password|secret|token|apikey|private.?key|credential)' + exclude_pattern: 'REDACTED|\*\*\*|redact' + fix: "Redact sensitive fields before logging" + found_in: [go-log] + example_bad: | + log.Printf("config: token=%s", cfg.Token) + example_good: | + log.Printf("config: token=%s", redact(cfg.Token)) + first_seen: "2026-03-09" + detection: regex +``` + +**Step 2: Write `catalog/go-correctness.yaml`** + +```yaml +- id: go-cor-001 + title: "Goroutine without WaitGroup or context" + severity: high + languages: [go] + tags: [correctness, goroutine-leak] + pattern: 'go\s+func\s*\(' + exclude_pattern: 'wg\.|\.Go\(|context\.|done\s*<-|select\s*\{' + fix: "Use 
sync.WaitGroup.Go() or ensure goroutine has a shutdown signal" + found_in: [core/go] + example_bad: | + go func() { doWork() }() + example_good: | + wg.Go(func() { doWork() }) + first_seen: "2026-03-09" + detection: regex + +- id: go-cor-002 + title: "WaitGroup.Wait without context/timeout" + severity: high + languages: [go] + tags: [correctness, deadlock] + pattern: '\.Wait\(\)' + exclude_pattern: 'select\s*\{|ctx\.Done|context\.With|time\.After' + fix: "Wrap wg.Wait() in a select with context.Done() or timeout" + found_in: [core/go] + example_bad: | + wg.Wait() // blocks forever if goroutine hangs + example_good: | + done := make(chan struct{}) + go func() { wg.Wait(); close(done) }() + select { + case <-done: + case <-ctx.Done(): + } + first_seen: "2026-03-09" + detection: regex + +- id: go-cor-003 + title: "Silent error swallowing" + severity: medium + languages: [go] + tags: [correctness, error-handling] + pattern: '^\s*_\s*=\s*\w+\.\w+\(' + exclude_pattern: 'defer|Close\(|Flush\(' + fix: "Handle or propagate errors instead of discarding with _" + found_in: [go-process, go-ratelimit] + example_bad: | + _ = db.Save(record) + example_good: | + if err := db.Save(record); err != nil { + return fmt.Errorf("save record: %w", err) + } + first_seen: "2026-03-09" + detection: regex + +- id: go-cor-004 + title: "Panic in library code" + severity: medium + languages: [go] + tags: [correctness, panic] + pattern: '\bpanic\(' + exclude_pattern: '_test\.go|// unreachable|Must\w+\(' + fix: "Return errors instead of panicking in library code" + found_in: [go-i18n] + example_bad: | + func Parse(s string) *Node { panic("not implemented") } + example_good: | + func Parse(s string) (*Node, error) { return nil, fmt.Errorf("not implemented") } + first_seen: "2026-03-09" + detection: regex + +- id: go-cor-005 + title: "File deletion without path validation" + severity: high + languages: [go] + tags: [correctness, safety] + pattern: 'os\.Remove(All)?\(' + exclude_pattern: 
'filepath\.Clean|ValidatePath|strings\.Contains.*\.\.' + fix: "Validate path does not escape base directory before deletion" + found_in: [go-io] + example_bad: | + os.RemoveAll(filepath.Join(base, userInput)) + example_good: | + clean := filepath.Clean(filepath.Join(base, userInput)) + if !strings.HasPrefix(clean, base) { return ErrPathTraversal } + os.RemoveAll(clean) + first_seen: "2026-03-09" + detection: regex + +- id: go-cor-006 + title: "Missing error return from API/network calls" + severity: medium + languages: [go] + tags: [correctness, error-handling] + pattern: 'resp,\s*_\s*:=.*\.(Get|Post|Do|Send)\(' + fix: "Check and handle HTTP/API errors" + found_in: [go-forge, go-git] + example_bad: | + resp, _ := client.Get(url) + example_good: | + resp, err := client.Get(url) + if err != nil { return fmt.Errorf("api call: %w", err) } + first_seen: "2026-03-09" + detection: regex + +- id: go-cor-007 + title: "Signal handler uses wrong type" + severity: medium + languages: [go] + tags: [correctness, signals] + pattern: 'syscall\.Signal\b' + exclude_pattern: 'os\.Signal' + fix: "Use os.Signal for portable signal handling" + found_in: [go-process] + example_bad: | + func Handle(sig syscall.Signal) { ... } + example_good: | + func Handle(sig os.Signal) { ... } + first_seen: "2026-03-09" + detection: regex +``` + +**Step 3: Write `catalog/go-modernise.yaml`** + +```yaml +- id: go-mod-001 + title: "Manual slice clone via append([]T(nil)...)" + severity: low + languages: [go] + tags: [modernise, go126] + pattern: 'append\(\[\]\w+\(nil\),\s*\w+\.\.\.\)' + fix: "Use slices.Clone() from Go 1.21+" + found_in: [core/go] + example_bad: | + copy := append([]string(nil), original...) 
+ example_good: | + copy := slices.Clone(original) + first_seen: "2026-03-09" + detection: regex + +- id: go-mod-002 + title: "Manual sort of string/int slices" + severity: low + languages: [go] + tags: [modernise, go126] + pattern: 'sort\.Strings\(|sort\.Ints\(|sort\.Slice\(' + exclude_pattern: 'sort\.SliceStable' + fix: "Use slices.Sort() or slices.Sorted(iter) from Go 1.21+" + found_in: [core/go] + example_bad: | + sort.Strings(names) + example_good: | + slices.Sort(names) + first_seen: "2026-03-09" + detection: regex + +- id: go-mod-003 + title: "Manual reverse iteration loop" + severity: low + languages: [go] + tags: [modernise, go126] + pattern: 'for\s+\w+\s*:=\s*len\(\w+\)\s*-\s*1' + fix: "Use slices.Backward() from Go 1.23+" + found_in: [core/go] + example_bad: | + for i := len(items) - 1; i >= 0; i-- { use(items[i]) } + example_good: | + for _, item := range slices.Backward(items) { use(item) } + first_seen: "2026-03-09" + detection: regex + +- id: go-mod-004 + title: "WaitGroup Add+Done instead of Go()" + severity: low + languages: [go] + tags: [modernise, go126] + pattern: 'wg\.Add\(1\)' + fix: "Use sync.WaitGroup.Go() from Go 1.26" + found_in: [core/go] + example_bad: | + wg.Add(1) + go func() { defer wg.Done(); work() }() + example_good: | + wg.Go(func() { work() }) + first_seen: "2026-03-09" + detection: regex + +- id: go-mod-005 + title: "Manual map key collection" + severity: low + languages: [go] + tags: [modernise, go126] + pattern: 'for\s+\w+\s*:=\s*range\s+\w+\s*\{\s*\n\s*\w+\s*=\s*append' + exclude_pattern: 'maps\.Keys' + fix: "Use maps.Keys() or slices.Sorted(maps.Keys()) from Go 1.23+" + found_in: [core/go] + example_bad: | + var keys []string + for k := range m { keys = append(keys, k) } + example_good: | + keys := slices.Sorted(maps.Keys(m)) + first_seen: "2026-03-09" + detection: regex +``` + +**Step 4: Run all tests to verify catalog loads correctly** + +Run: `cd ~/Code/core/lint && go test ./pkg/lint/ -v` +Expected: PASS (all tests, 
including TestCatalog_LoadDir which reads the catalog/ dir) + +**Step 5: Commit** + +```bash +git add catalog/ +git commit -m "feat: seed catalog with 18 patterns from ecosystem sweep" +``` + +--- + +### Task 8: CLI binary with `cli.Main()` + +**Files:** +- Create: `/Users/snider/Code/core/lint/cmd/core-lint/main.go` +- Create: `/Users/snider/Code/core/lint/lint.go` (embed catalog + public API) + +**Step 1: Create the embed entry point** + +Create `/Users/snider/Code/core/lint/lint.go`: + +```go +package lint + +import ( + "embed" + + lintpkg "forge.lthn.ai/core/lint/pkg/lint" +) + +//go:embed catalog/*.yaml +var catalogFS embed.FS + +// LoadEmbeddedCatalog loads the built-in catalog from embedded YAML files. +func LoadEmbeddedCatalog() (*lintpkg.Catalog, error) { + return lintpkg.LoadFS(catalogFS, "catalog") +} +``` + +**Step 2: Create the CLI entry point** + +Create `/Users/snider/Code/core/lint/cmd/core-lint/main.go`: + +```go +package main + +import ( + "fmt" + "os" + + "forge.lthn.ai/core/cli/pkg/cli" + lint "forge.lthn.ai/core/lint" + lintpkg "forge.lthn.ai/core/lint/pkg/lint" +) + +func main() { + cli.Main( + cli.WithCommands("lint", addLintCommands), + ) +} + +func addLintCommands(root *cli.Command) { + lintCmd := &cli.Command{ + Use: "lint", + Short: "Pattern-based code checker", + } + root.AddCommand(lintCmd) + + // core-lint lint check [path...] 
+ lintCmd.AddCommand(cli.NewCommand( + "check [path...]", + "Run pattern checks against source files", + "Scans files for known anti-patterns from the catalog", + func(cmd *cli.Command, args []string) error { + format, _ := cmd.Flags().GetString("format") + lang, _ := cmd.Flags().GetString("lang") + severity, _ := cmd.Flags().GetString("severity") + + cat, err := lint.LoadEmbeddedCatalog() + if err != nil { + return fmt.Errorf("load catalog: %w", err) + } + + rules := cat.Rules + if lang != "" { + rules = cat.ForLanguage(lang) + } + if severity != "" { + filtered := (&lintpkg.Catalog{Rules: rules}).AtSeverity(severity) + rules = filtered + } + + scanner, err := lintpkg.NewScanner(rules) + if err != nil { + return fmt.Errorf("create scanner: %w", err) + } + + paths := args + if len(paths) == 0 { + paths = []string{"."} + } + + var allFindings []lintpkg.Finding + for _, p := range paths { + findings, err := scanner.ScanDir(p) + if err != nil { + return fmt.Errorf("scan %s: %w", p, err) + } + allFindings = append(allFindings, findings...) 
+ } + + switch format { + case "json": + return lintpkg.WriteJSON(os.Stdout, allFindings) + case "jsonl": + return lintpkg.WriteJSONL(os.Stdout, allFindings) + default: + lintpkg.WriteText(os.Stdout, allFindings) + } + + if len(allFindings) > 0 { + s := lintpkg.Summarise(allFindings) + fmt.Fprintf(os.Stderr, "\n%d findings", s.Total) + for sev, count := range s.BySeverity { + fmt.Fprintf(os.Stderr, " | %s: %d", sev, count) + } + fmt.Fprintln(os.Stderr) + } + return nil + }, + )) + + // Add flags to check command + checkCmd := lintCmd.Commands()[0] + checkCmd.Flags().StringP("format", "f", "text", "Output format: text, json, jsonl") + checkCmd.Flags().StringP("lang", "l", "", "Filter by language: go, php, ts, cpp") + checkCmd.Flags().StringP("severity", "s", "", "Minimum severity: critical, high, medium, low, info") + + // core-lint lint catalog + catalogCmd := &cli.Command{ + Use: "catalog", + Short: "Browse the pattern catalog", + } + lintCmd.AddCommand(catalogCmd) + + // core-lint lint catalog list + catalogCmd.AddCommand(cli.NewCommand( + "list", + "List available rules", + "", + func(cmd *cli.Command, args []string) error { + lang, _ := cmd.Flags().GetString("lang") + + cat, err := lint.LoadEmbeddedCatalog() + if err != nil { + return err + } + + rules := cat.Rules + if lang != "" { + rules = cat.ForLanguage(lang) + } + + for _, r := range rules { + fmt.Printf("%-12s [%s] %s\n", r.ID, r.Severity, r.Title) + } + fmt.Fprintf(os.Stderr, "\n%d rules\n", len(rules)) + return nil + }, + )) + catalogCmd.Commands()[0].Flags().StringP("lang", "l", "", "Filter by language") + + // core-lint lint catalog show + catalogCmd.AddCommand(cli.NewCommand( + "show [rule-id]", + "Show details for a specific rule", + "", + func(cmd *cli.Command, args []string) error { + if len(args) == 0 { + return fmt.Errorf("rule ID required") + } + cat, err := lint.LoadEmbeddedCatalog() + if err != nil { + return err + } + r := cat.ByID(args[0]) + if r == nil { + return fmt.Errorf("rule %s not 
found", args[0]) + } + fmt.Printf("ID: %s\n", r.ID) + fmt.Printf("Title: %s\n", r.Title) + fmt.Printf("Severity: %s\n", r.Severity) + fmt.Printf("Languages: %v\n", r.Languages) + fmt.Printf("Tags: %v\n", r.Tags) + fmt.Printf("Pattern: %s\n", r.Pattern) + if r.ExcludePattern != "" { + fmt.Printf("Exclude: %s\n", r.ExcludePattern) + } + fmt.Printf("Fix: %s\n", r.Fix) + if r.ExampleBad != "" { + fmt.Printf("\nBad:\n%s\n", r.ExampleBad) + } + if r.ExampleGood != "" { + fmt.Printf("Good:\n%s\n", r.ExampleGood) + } + return nil + }, + )) +} +``` + +**Step 3: Add cli dependency** + +```bash +cd ~/Code/core/lint +go get forge.lthn.ai/core/cli +go mod tidy +``` + +**Step 4: Build and smoke test** + +```bash +cd ~/Code/core/lint +go build -o ./bin/core-lint ./cmd/core-lint +./bin/core-lint lint catalog list +./bin/core-lint lint catalog show go-sec-001 +./bin/core-lint lint check --lang go --format json ~/Code/host-uk/core/pkg/core/ +``` + +Expected: Binary builds, catalog lists 18 rules, show displays rule details, check scans files. + +**Step 5: Commit** + +```bash +git add lint.go cmd/core-lint/main.go go.mod go.sum +git commit -m "feat: add core-lint CLI with check, catalog list, catalog show" +``` + +--- + +### Task 9: Run all tests, push to forge + +**Step 1: Run full test suite** + +```bash +cd ~/Code/core/lint +go test -race -count=1 ./... +``` + +Expected: PASS with race detector + +**Step 2: Run go vet** + +```bash +go vet ./... 
+``` + +Expected: No issues + +**Step 3: Build binary** + +```bash +go build -trimpath -o ./bin/core-lint ./cmd/core-lint +``` + +**Step 4: Smoke test against a real repo** + +```bash +./bin/core-lint lint check --lang go ~/Code/host-uk/core/pkg/core/ +./bin/core-lint lint check --lang go --severity high ~/Code/core/go-io/ +``` + +Expected: Any findings are displayed (or no findings if the repos are already clean from our sweep) + +**Step 5: Update go.work** + +```bash +# Add ./core/lint to ~/Code/go.work if not already there +cd ~/Code && go work sync +``` + +**Step 6: Push to forge** + +```bash +cd ~/Code/core/lint +git push -u origin main +``` + +**Step 7: Tag initial release** + +```bash +git tag v0.1.0 +git push origin v0.1.0 +``` diff --git a/docs/plans/2026-03-12-altum-update-checker-design.md b/docs/plans/2026-03-12-altum-update-checker-design.md new file mode 100644 index 0000000..a0bbe0d --- /dev/null +++ b/docs/plans/2026-03-12-altum-update-checker-design.md @@ -0,0 +1,160 @@ +# AltumCode Update Checker — Design + +> **Note:** Layer 1 (version detection via PHP artisan) is implemented and documented at `docs/docs/php/packages/uptelligence.md`. Layer 2 (browser-automated downloads via Claude Code skill) is NOT yet implemented. + +## Problem + +Host UK runs 4 AltumCode SaaS products and 13 plugins across two marketplaces (CodeCanyon + LemonSqueezy). Checking for updates and downloading them is a manual process: ~50 clicks across two marketplace UIs, moving 16+ zip files, extracting to the right directories. This eats a morning of momentum every update cycle. + +## Solution + +Two-layer system: lightweight version detection (PHP artisan command) + browser-automated download (Claude Code skill). 
+ +## Architecture + +``` +Layer 1: Detection (core/php-uptelligence) + artisan uptelligence:check-updates + 5 HTTP GETs, no auth, schedulable + Compares remote vs deployed versions + +Layer 2: Download (Claude Code skill) + Playwright → LemonSqueezy (16 items) + Claude in Chrome → CodeCanyon (2 items) + Downloads zips to staging folder + Extracts to saas/services/{product}/package/ + +Layer 3: Deploy (existing — manual) + docker build → scp → deploy_saas.yml + Human in the loop +``` + +## Layer 1: Version Detection + +### Public Endpoints (no auth required) + +| Endpoint | Returns | +|----------|---------| +| `GET https://66analytics.com/info.php` | `{"latest_release_version": "66.0.0", "latest_release_version_code": 6600}` | +| `GET https://66biolinks.com/info.php` | Same format | +| `GET https://66pusher.com/info.php` | Same format | +| `GET https://66socialproof.com/info.php` | Same format | +| `GET https://dev.altumcode.com/plugins-versions` | `{"affiliate": {"version": "2.0.1"}, "ultimate-blocks": {"version": "9.1.0"}, ...}` | + +### Deployed Version Sources + +- **Product version**: `PRODUCT_CODE` constant in deployed source `config.php` +- **Plugin versions**: `version` field in each plugin's `config.php` or `config.json` + +### Artisan Command + +`php artisan uptelligence:check-updates` + +Output: +``` +Product Deployed Latest Status +────────────────────────────────────────────── +66analytics 65.0.0 66.0.0 UPDATE AVAILABLE +66biolinks 65.0.0 66.0.0 UPDATE AVAILABLE +66pusher 65.0.0 65.0.0 ✓ current +66socialproof 65.0.0 66.0.0 UPDATE AVAILABLE + +Plugin Deployed Latest Status +────────────────────────────────────────────── +affiliate 2.0.0 2.0.1 UPDATE AVAILABLE +ultimate-blocks 9.1.0 9.1.0 ✓ current +... +``` + +Lives in `core/php-uptelligence` as a scheduled check or on-demand command. + +## Layer 2: Browser-Automated Download + +### Claude Code Skill: `/update-altum` + +Workflow: +1. Run version check (Layer 1) — show what needs updating +2. 
Ask for confirmation before downloading +3. Download from both marketplaces +4. Extract to staging directories +5. Report what changed + +### Marketplace Access + +**LemonSqueezy (Playwright)** +- Auth: Magic link email to `snider@lt.hn` — user taps on phone +- Flow per item: Navigate to order detail → click "Download" button +- 16 items across 2 pages of orders +- Session persists for the skill invocation + +**CodeCanyon (Claude in Chrome)** +- Auth: Saved browser session cookies (user `snidered`) +- Flow per item: Click "Download" dropdown → "All files & documentation" +- 2 items on downloads page + +### Product-to-Marketplace Mapping + +| Product | CodeCanyon | LemonSqueezy | +|---------|-----------|--------------| +| 66biolinks | Regular licence | Extended licence (66biolinks custom, $359.28) | +| 66socialproof | Regular licence | — | +| 66analytics | — | Regular licence | +| 66pusher | — | Regular licence | + +### Plugin Inventory (all LemonSqueezy) + +| Plugin | Price | Applies To | +|--------|-------|------------| +| Pro Notifications | $58.80 | 66socialproof | +| Teams Plugin | $58.80 | All products | +| Push Notifications Plugin | $46.80 | All products | +| Ultimate Blocks | $32.40 | 66biolinks | +| Pro Blocks | $32.40 | 66biolinks | +| Payment Blocks | $32.40 | 66biolinks | +| Affiliate Plugin | $32.40 | All products | +| PWA Plugin | $25.20 | All products | +| Image Optimizer Plugin | $19.20 | All products | +| Email Shield Plugin | FREE | All products | +| Dynamic OG images plugin | FREE | 66biolinks | +| Offload & CDN Plugin | FREE | All products (gift from Altum) | + +### Staging & Extraction + +- Download to: `~/Code/lthn/saas/updates/YYYY-MM-DD/` +- Products extract to: `~/Code/lthn/saas/services/{product}/package/product/` +- Plugins extract to: `~/Code/lthn/saas/services/{product}/package/product/plugins/{plugin_id}/` + +## LemonSqueezy Order UUIDs + +Stable order URLs for direct navigation: + +| Product | Order URL | +|---------|-----------| +| 
66analytics | `/my-orders/2972471f-abac-4165-b78d-541b176de180` | + +(Remaining UUIDs to be captured on first full run of the skill.) + +## Out of Scope + +- No auto-deploy to production (human runs `deploy_saas.yml`) +- No licence key handling or financial transactions +- No AltumCode Club membership management +- No Blesta updates (different vendor) +- No update SQL migration execution (handled by AltumCode's own update scripts) + +## Key Technical Details + +- AltumCode products use Unirest HTTP client for API calls +- Product `info.php` endpoints are public, no rate limiting observed +- Plugin versions endpoint (`dev.altumcode.com`) is also public +- Production Docker images have `/install/` and `/update/` directories stripped +- Updates require full Docker image rebuild and redeployment via Ansible +- CodeCanyon download URLs contain stable purchase UUIDs +- LemonSqueezy uses magic link auth (no password, email-based) +- Playwright can access LemonSqueezy; Claude in Chrome cannot (payment platform safety block) + +## Workflow Summary + +**Before**: Get email from AltumCode → log into 2 marketplaces → click through 18 products/plugins → download 16+ zips → extract to right directories → rebuild Docker images → deploy. Half a morning. + +**After**: Run `artisan uptelligence:check-updates` → see what's behind → invoke `/update-altum` → tap magic link on phone → go make coffee → come back to staged files → `deploy_saas.yml`. 10 minutes of human time. diff --git a/docs/plans/2026-03-12-altum-update-checker-plan.md b/docs/plans/2026-03-12-altum-update-checker-plan.md new file mode 100644 index 0000000..37ecb28 --- /dev/null +++ b/docs/plans/2026-03-12-altum-update-checker-plan.md @@ -0,0 +1,799 @@ +# AltumCode Update Checker Implementation Plan + +> **Note:** Layer 1 (Tasks 1-2, 4: version checking + seeder + sync command) is implemented and documented at `docs/docs/php/packages/uptelligence.md`. 
Task 3 (Claude Code browser skill for Layer 2 downloads) is NOT yet implemented. + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Add AltumCode product + plugin version checking to uptelligence, and create a Claude Code skill for browser-automated downloads from LemonSqueezy and CodeCanyon. + +**Architecture:** Extend the existing `VendorUpdateCheckerService` to handle `PLATFORM_ALTUM` vendors via 5 public HTTP endpoints. Seed the vendors table with all 4 products and 13 plugins. Create a Claude Code plugin skill that uses Playwright (LemonSqueezy) and Chrome (CodeCanyon) to download updates. + +**Tech Stack:** PHP 8.4, Laravel, Pest, Claude Code plugins (Playwright MCP + Chrome MCP) + +--- + +### Task 1: Add AltumCode check to VendorUpdateCheckerService + +**Files:** +- Modify: `/Users/snider/Code/core/php-uptelligence/Services/VendorUpdateCheckerService.php` +- Test: `/Users/snider/Code/core/php-uptelligence/tests/Unit/AltumCodeCheckerTest.php` + +**Step 1: Write the failing test** + +Create `/Users/snider/Code/core/php-uptelligence/tests/Unit/AltumCodeCheckerTest.php`: + +```php +service = app(VendorUpdateCheckerService::class); +}); + +it('checks altum product version via info.php', function () { + Http::fake([ + 'https://66analytics.com/info.php' => Http::response([ + 'latest_release_version' => '66.0.0', + 'latest_release_version_code' => 6600, + ]), + ]); + + $vendor = Vendor::factory()->create([ + 'slug' => '66analytics', + 'name' => '66analytics', + 'source_type' => Vendor::SOURCE_LICENSED, + 'plugin_platform' => Vendor::PLATFORM_ALTUM, + 'current_version' => '65.0.0', + 'is_active' => true, + ]); + + $result = $this->service->checkVendor($vendor); + + expect($result['status'])->toBe('success') + ->and($result['current'])->toBe('65.0.0') + ->and($result['latest'])->toBe('66.0.0') + ->and($result['has_update'])->toBeTrue(); +}); + +it('reports no update when altum product is current', 
function () { + Http::fake([ + 'https://66analytics.com/info.php' => Http::response([ + 'latest_release_version' => '65.0.0', + 'latest_release_version_code' => 6500, + ]), + ]); + + $vendor = Vendor::factory()->create([ + 'slug' => '66analytics', + 'name' => '66analytics', + 'source_type' => Vendor::SOURCE_LICENSED, + 'plugin_platform' => Vendor::PLATFORM_ALTUM, + 'current_version' => '65.0.0', + 'is_active' => true, + ]); + + $result = $this->service->checkVendor($vendor); + + expect($result['has_update'])->toBeFalse(); +}); + +it('checks altum plugin versions via plugins-versions endpoint', function () { + Http::fake([ + 'https://dev.altumcode.com/plugins-versions' => Http::response([ + 'affiliate' => ['version' => '2.0.1'], + 'teams' => ['version' => '3.0.0'], + ]), + ]); + + $vendor = Vendor::factory()->create([ + 'slug' => 'altum-plugin-affiliate', + 'name' => 'Affiliate Plugin', + 'source_type' => Vendor::SOURCE_PLUGIN, + 'plugin_platform' => Vendor::PLATFORM_ALTUM, + 'current_version' => '2.0.0', + 'is_active' => true, + ]); + + $result = $this->service->checkVendor($vendor); + + expect($result['status'])->toBe('success') + ->and($result['latest'])->toBe('2.0.1') + ->and($result['has_update'])->toBeTrue(); +}); + +it('handles altum info.php timeout gracefully', function () { + Http::fake([ + 'https://66analytics.com/info.php' => Http::response('', 500), + ]); + + $vendor = Vendor::factory()->create([ + 'slug' => '66analytics', + 'name' => '66analytics', + 'source_type' => Vendor::SOURCE_LICENSED, + 'plugin_platform' => Vendor::PLATFORM_ALTUM, + 'current_version' => '65.0.0', + 'is_active' => true, + ]); + + $result = $this->service->checkVendor($vendor); + + expect($result['status'])->toBe('error') + ->and($result['has_update'])->toBeFalse(); +}); +``` + +**Step 2: Run test to verify it fails** + +Run: `cd /Users/snider/Code/core/php-uptelligence && composer test -- --filter=AltumCodeChecker` +Expected: FAIL — altum vendors still hit `skipCheck()` + +**Step 
3: Write minimal implementation** + +In `/Users/snider/Code/core/php-uptelligence/Services/VendorUpdateCheckerService.php`, modify `checkVendor()` to route altum vendors: + +```php +public function checkVendor(Vendor $vendor): array +{ + $result = match (true) { + $this->isAltumPlatform($vendor) && $vendor->isLicensed() => $this->checkAltumProduct($vendor), + $this->isAltumPlatform($vendor) && $vendor->isPlugin() => $this->checkAltumPlugin($vendor), + $vendor->isOss() && $this->isGitHubUrl($vendor->git_repo_url) => $this->checkGitHub($vendor), + $vendor->isOss() && $this->isGiteaUrl($vendor->git_repo_url) => $this->checkGitea($vendor), + default => $this->skipCheck($vendor), + }; + + // ... rest unchanged +} +``` + +Add the three new methods: + +```php +/** + * Check if vendor is on the AltumCode platform. + */ +protected function isAltumPlatform(Vendor $vendor): bool +{ + return $vendor->plugin_platform === Vendor::PLATFORM_ALTUM; +} + +/** + * AltumCode product info endpoint mapping. + */ +protected function getAltumProductInfoUrl(Vendor $vendor): ?string +{ + $urls = [ + '66analytics' => 'https://66analytics.com/info.php', + '66biolinks' => 'https://66biolinks.com/info.php', + '66pusher' => 'https://66pusher.com/info.php', + '66socialproof' => 'https://66socialproof.com/info.php', + ]; + + return $urls[$vendor->slug] ?? null; +} + +/** + * Check an AltumCode product for updates via its info.php endpoint. + */ +protected function checkAltumProduct(Vendor $vendor): array +{ + $url = $this->getAltumProductInfoUrl($vendor); + if (! $url) { + return $this->errorResult("No info.php URL mapped for {$vendor->slug}"); + } + + try { + $response = Http::timeout(5)->get($url); + + if (! $response->successful()) { + return $this->errorResult("AltumCode info.php returned {$response->status()}"); + } + + $data = $response->json(); + $latestVersion = $data['latest_release_version'] ?? null; + + if (! 
$latestVersion) { + return $this->errorResult('No version in info.php response'); + } + + return $this->buildResult( + vendor: $vendor, + latestVersion: $this->normaliseVersion($latestVersion), + releaseInfo: [ + 'version_code' => $data['latest_release_version_code'] ?? null, + 'source' => $url, + ] + ); + } catch (\Exception $e) { + return $this->errorResult("AltumCode check failed: {$e->getMessage()}"); + } +} + +/** + * Check an AltumCode plugin for updates via the central plugins-versions endpoint. + */ +protected function checkAltumPlugin(Vendor $vendor): array +{ + try { + $allPlugins = $this->getAltumPluginVersions(); + + if ($allPlugins === null) { + return $this->errorResult('Failed to fetch AltumCode plugin versions'); + } + + // Extract the plugin_id from the vendor slug (strip 'altum-plugin-' prefix) + $pluginId = str_replace('altum-plugin-', '', $vendor->slug); + + if (! isset($allPlugins[$pluginId])) { + return $this->errorResult("Plugin '{$pluginId}' not found in AltumCode registry"); + } + + $latestVersion = $allPlugins[$pluginId]['version'] ?? null; + + return $this->buildResult( + vendor: $vendor, + latestVersion: $this->normaliseVersion($latestVersion), + releaseInfo: ['source' => 'dev.altumcode.com/plugins-versions'] + ); + } catch (\Exception $e) { + return $this->errorResult("AltumCode plugin check failed: {$e->getMessage()}"); + } +} + +/** + * Fetch all AltumCode plugin versions (cached for 1 hour within a check run). + */ +protected ?array $altumPluginVersionsCache = null; + +protected function getAltumPluginVersions(): ?array +{ + if ($this->altumPluginVersionsCache !== null) { + return $this->altumPluginVersionsCache; + } + + $response = Http::timeout(5)->get('https://dev.altumcode.com/plugins-versions'); + + if (! 
$response->successful()) { + return null; + } + + $this->altumPluginVersionsCache = $response->json(); + + return $this->altumPluginVersionsCache; +} +``` + +**Step 4: Run test to verify it passes** + +Run: `cd /Users/snider/Code/core/php-uptelligence && composer test -- --filter=AltumCodeChecker` +Expected: PASS (4 tests) + +**Step 5: Commit** + +```bash +cd /Users/snider/Code/core/php-uptelligence +git add Services/VendorUpdateCheckerService.php tests/Unit/AltumCodeCheckerTest.php +git commit -m "feat: add AltumCode product + plugin version checking + +Extends VendorUpdateCheckerService to check AltumCode products via +their info.php endpoints and plugins via dev.altumcode.com/plugins-versions. +No auth required — all endpoints are public. + +Co-Authored-By: Virgil " +``` + +--- + +### Task 2: Seed AltumCode vendors + +**Files:** +- Create: `/Users/snider/Code/core/php-uptelligence/database/seeders/AltumCodeVendorSeeder.php` +- Test: `/Users/snider/Code/core/php-uptelligence/tests/Unit/AltumCodeVendorSeederTest.php` + +**Step 1: Write the failing test** + +Create `/Users/snider/Code/core/php-uptelligence/tests/Unit/AltumCodeVendorSeederTest.php`: + +```php +<?php + +use Core\Mod\Uptelligence\Models\Vendor; + +it('seeds 4 altum products', function () { + $this->artisan('db:seed', ['--class' => 'Core\\Mod\\Uptelligence\\Database\\Seeders\\AltumCodeVendorSeeder']); + + expect(Vendor::where('source_type', Vendor::SOURCE_LICENSED) + ->where('plugin_platform', Vendor::PLATFORM_ALTUM) + ->count() + )->toBe(4); +}); + +it('seeds 13 altum plugins', function () { + $this->artisan('db:seed', ['--class' => 'Core\\Mod\\Uptelligence\\Database\\Seeders\\AltumCodeVendorSeeder']); + + expect(Vendor::where('source_type', Vendor::SOURCE_PLUGIN) + ->where('plugin_platform', Vendor::PLATFORM_ALTUM) + ->count() + )->toBe(13); +}); + +it('is idempotent', function () { + $this->artisan('db:seed', ['--class' => 'Core\\Mod\\Uptelligence\\Database\\Seeders\\AltumCodeVendorSeeder']); + $this->artisan('db:seed', ['--class' =>
'Core\\Mod\\Uptelligence\\Database\\Seeders\\AltumCodeVendorSeeder']); + + expect(Vendor::where('plugin_platform', Vendor::PLATFORM_ALTUM)->count())->toBe(17); +}); +``` + +**Step 2: Run test to verify it fails** + +Run: `cd /Users/snider/Code/core/php-uptelligence && composer test -- --filter=AltumCodeVendorSeeder` +Expected: FAIL — seeder class not found + +**Step 3: Write minimal implementation** + +Create `/Users/snider/Code/core/php-uptelligence/database/seeders/AltumCodeVendorSeeder.php`: + +```php +<?php + +namespace Core\Mod\Uptelligence\Database\Seeders; + +use Core\Mod\Uptelligence\Models\Vendor; +use Illuminate\Database\Seeder; + +class AltumCodeVendorSeeder extends Seeder +{ + public function run(): void + { + $products = [ + ['slug' => '66analytics', 'name' => '66analytics', 'vendor_name' => 'AltumCode', 'current_version' => '65.0.0'], + ['slug' => '66biolinks', 'name' => '66biolinks', 'vendor_name' => 'AltumCode', 'current_version' => '65.0.0'], + ['slug' => '66pusher', 'name' => '66pusher', 'vendor_name' => 'AltumCode', 'current_version' => '65.0.0'], + ['slug' => '66socialproof', 'name' => '66socialproof', 'vendor_name' => 'AltumCode', 'current_version' => '65.0.0'], + ]; + + foreach ($products as $product) { + Vendor::updateOrCreate( + ['slug' => $product['slug']], + [ + ...$product, + 'source_type' => Vendor::SOURCE_LICENSED, + 'plugin_platform' => Vendor::PLATFORM_ALTUM, + 'is_active' => true, + ] + ); + } + + $plugins = [ + ['slug' => 'altum-plugin-affiliate', 'name' => 'Affiliate Plugin', 'current_version' => '2.0.0'], + ['slug' => 'altum-plugin-push-notifications', 'name' => 'Push Notifications Plugin', 'current_version' => '1.0.0'], + ['slug' => 'altum-plugin-teams', 'name' => 'Teams Plugin', 'current_version' => '1.0.0'], + ['slug' => 'altum-plugin-pwa', 'name' => 'PWA Plugin', 'current_version' => '1.0.0'], + ['slug' => 'altum-plugin-image-optimizer', 'name' => 'Image Optimizer Plugin', 'current_version' => '3.1.0'], + ['slug' => 'altum-plugin-email-shield', 'name' => 'Email Shield Plugin', 'current_version' => '1.0.0'], + ['slug' => 'altum-plugin-dynamic-og-images', 'name' => 'Dynamic OG Images Plugin', 'current_version' => '1.0.0'], + ['slug' => 'altum-plugin-offload', 'name' =>
'Offload & CDN Plugin', 'current_version' => '1.0.0'], + ['slug' => 'altum-plugin-payment-blocks', 'name' => 'Payment Blocks Plugin', 'current_version' => '1.0.0'], + ['slug' => 'altum-plugin-ultimate-blocks', 'name' => 'Ultimate Blocks Plugin', 'current_version' => '9.1.0'], + ['slug' => 'altum-plugin-pro-blocks', 'name' => 'Pro Blocks Plugin', 'current_version' => '1.0.0'], + ['slug' => 'altum-plugin-pro-notifications', 'name' => 'Pro Notifications Plugin', 'current_version' => '1.0.0'], + ['slug' => 'altum-plugin-aix', 'name' => 'AIX Plugin', 'current_version' => '1.0.0'], + ]; + + foreach ($plugins as $plugin) { + Vendor::updateOrCreate( + ['slug' => $plugin['slug']], + [ + ...$plugin, + 'vendor_name' => 'AltumCode', + 'source_type' => Vendor::SOURCE_PLUGIN, + 'plugin_platform' => Vendor::PLATFORM_ALTUM, + 'is_active' => true, + ] + ); + } + } +} +``` + +**Step 4: Run test to verify it passes** + +Run: `cd /Users/snider/Code/core/php-uptelligence && composer test -- --filter=AltumCodeVendorSeeder` +Expected: PASS (3 tests) + +**Step 5: Commit** + +```bash +cd /Users/snider/Code/core/php-uptelligence +git add database/seeders/AltumCodeVendorSeeder.php tests/Unit/AltumCodeVendorSeederTest.php +git commit -m "feat: seed AltumCode vendors — 4 products + 13 plugins + +Idempotent seeder using updateOrCreate. Products are SOURCE_LICENSED, +plugins are SOURCE_PLUGIN, all PLATFORM_ALTUM. Version numbers will +need updating to match actual deployed versions. 
+ +Co-Authored-By: Virgil " +``` + +--- + +### Task 3: Create Claude Code plugin skill for downloads + +**Files:** +- Create: `/Users/snider/.claude/plugins/altum-updater/plugin.json` +- Create: `/Users/snider/.claude/plugins/altum-updater/skills/update-altum.md` + +**Step 1: Create plugin manifest** + +Create `/Users/snider/.claude/plugins/altum-updater/plugin.json`: + +```json +{ + "name": "altum-updater", + "description": "Download AltumCode product and plugin updates from LemonSqueezy and CodeCanyon", + "version": "0.1.0", + "skills": [ + { + "name": "update-altum", + "path": "skills/update-altum.md", + "description": "Download AltumCode product and plugin updates from marketplaces. Use when the user mentions updating AltumCode products, downloading from LemonSqueezy or CodeCanyon, or running the update checker." + } + ] +} +``` + +**Step 2: Create skill file** + +Create `/Users/snider/.claude/plugins/altum-updater/skills/update-altum.md`: + +```markdown +--- +name: update-altum +description: Download AltumCode product and plugin updates from LemonSqueezy and CodeCanyon +--- + +# AltumCode Update Downloader + +## Overview + +Downloads updated AltumCode products and plugins from two marketplaces: +- **LemonSqueezy** (Playwright): 66analytics, 66pusher, 66biolinks (extended), 13 plugins +- **CodeCanyon** (Claude in Chrome): 66biolinks (regular), 66socialproof + +## Pre-flight + +1. Run `php artisan uptelligence:check-updates --vendor=66analytics` (or check all) to see what needs updating +2. Show the user the version comparison table +3. Ask which products/plugins to download + +## LemonSqueezy Download Flow (Playwright) + +LemonSqueezy uses magic link auth. The user will need to tap the link on their phone. + +1. Navigate to `https://app.lemonsqueezy.com/my-orders` +2. If on login page, fill email `snider@lt.hn` and click Sign In +3. Tell user: "Magic link sent — tap the link on your phone" +4. Wait for redirect to orders page +5. 
For each product/plugin that needs updating: + a. Click the product link on the orders page (paginated — 10 per page, 2 pages) + b. In the order detail, find the "Download" button under "Files" + c. Click Download — file saves to default downloads folder +6. Move downloaded zips to staging: `~/Code/lthn/saas/updates/YYYY-MM-DD/` + +### LemonSqueezy Product Names (as shown on orders page) + +| Our Name | LemonSqueezy Order Name | +|----------|------------------------| +| 66analytics | "66analytics - Regular License" | +| 66pusher | "66pusher - Regular License" | +| 66biolinks (extended) | "66biolinks custom" | +| Affiliate Plugin | "Affiliate Plugin" | +| Push Notifications Plugin | "Push Notifications Plugin" | +| Teams Plugin | "Teams Plugin" | +| PWA Plugin | "PWA Plugin" | +| Image Optimizer Plugin | "Image Optimizer Plugin" | +| Email Shield Plugin | "Email Shield Plugin" | +| Dynamic OG Images | "Dynamic OG images plugin" | +| Offload & CDN | "Offload & CDN Plugin" | +| Payment Blocks | "Payment Blocks - 66biolinks plugin" | +| Ultimate Blocks | "Ultimate Blocks - 66biolinks plugin" | +| Pro Blocks | "Pro Blocks - 66biolinks plugin" | +| Pro Notifications | "Pro Notifications - 66socialproof plugin" | +| AltumCode Club | "The AltumCode Club" | + +## CodeCanyon Download Flow (Claude in Chrome) + +CodeCanyon uses saved browser session cookies (user: snidered). + +1. Navigate to `https://codecanyon.net/downloads` +2. Dismiss cookie banner if present (click "Reject all") +3. For 66socialproof: + a. Find "66socialproof" Download button + b. Click the dropdown arrow + c. Click "All files & documentation" +4. For 66biolinks: + a. Find "66biolinks" Download button (scroll down) + b. Click the dropdown arrow + c. Click "All files & documentation" +5. 
Move downloaded zips to staging + +### CodeCanyon Download URLs (stable) + +- 66socialproof: `/user/snidered/download_purchase/8d8ef4c1-5add-4eba-9a89-4261a9c87e0b` +- 66biolinks: `/user/snidered/download_purchase/38d79f4e-19cd-480a-b068-4332629b5206` + +## Post-Download + +1. List all zips in staging folder +2. For each product zip: + - Extract to `~/Code/lthn/saas/services/{product}/package/product/` +3. For each plugin zip: + - Extract to the correct product's `plugins/{plugin_id}/` directory + - Note: Some plugins apply to multiple products (affiliate, teams, etc.) +4. Show summary of what was updated +5. Remind user: "Files staged. Run `deploy_saas.yml` when ready to deploy." + +## Important Notes + +- Never make purchases or enter financial information +- LemonSqueezy session expires — if Playwright gets a login page mid-flow, re-trigger magic link +- CodeCanyon session depends on Chrome cookies — if logged out, tell user to log in manually +- The AltumCode Club subscription is NOT a downloadable product — skip it +- Plugin `aix` may not appear on LemonSqueezy (bundled with products) — skip if not found +``` + +**Step 3: Verify plugin loads** + +Run: `claude` in a new terminal, then type `/update-altum` to verify the skill is discovered. + +**Step 4: Commit** + +```bash +cd /Users/snider/.claude/plugins/altum-updater +git init +git add plugin.json skills/update-altum.md +git commit -m "feat: altum-updater Claude Code plugin — marketplace download skill + +Playwright for LemonSqueezy, Chrome for CodeCanyon. Includes full +product/plugin mapping and download flow documentation. 
+ +Co-Authored-By: Virgil " +``` + +--- + +### Task 4: Sync deployed plugin versions from source + +**Files:** +- Create: `/Users/snider/Code/core/php-uptelligence/Console/SyncAltumVersionsCommand.php` +- Modify: `/Users/snider/Code/core/php-uptelligence/Boot.php` (register command) +- Test: `/Users/snider/Code/core/php-uptelligence/tests/Unit/SyncAltumVersionsCommandTest.php` + +**Step 1: Write the failing test** + +```php +<?php + +it('runs in dry-run mode', function () { + $this->artisan('uptelligence:sync-altum-versions', ['--dry-run' => true]) + ->assertExitCode(0); +}); +``` + +**Step 2: Run test to verify it fails** + +Run: `cd /Users/snider/Code/core/php-uptelligence && composer test -- --filter=SyncAltumVersions` +Expected: FAIL — command not found + +**Step 3: Write minimal implementation** + +Create `/Users/snider/Code/core/php-uptelligence/Console/SyncAltumVersionsCommand.php`: + +```php +<?php + +namespace Core\Mod\Uptelligence\Console; + +use Core\Mod\Uptelligence\Models\Vendor; +use Illuminate\Console\Command; + +class SyncAltumVersionsCommand extends Command +{ + protected $signature = 'uptelligence:sync-altum-versions {--dry-run : Report changes without writing} {--path= : Base path to the saas services directory}'; + + protected $description = 'Sync deployed AltumCode product and plugin versions from source files'; + + protected array $productPaths = [ + '66analytics' => '66analytics/package/product', + '66biolinks' => '66biolinks/package/product', + '66pusher' => '66pusher/package/product', + '66socialproof' => '66socialproof/package/product', + ]; + + public function handle(): int + { + $basePath = $this->option('path') + ?? env('SAAS_SERVICES_PATH', base_path('../lthn/saas/services')); + $dryRun = $this->option('dry-run'); + + $this->info('Syncing AltumCode versions from source...'); + $this->newLine(); + + $updates = []; + + // Sync product versions + foreach ($this->productPaths as $slug => $relativePath) { + $productPath = rtrim($basePath, '/') . '/' . $relativePath; + $version = $this->readProductVersion($productPath); + + if ($version) { + $updates[] = $this->syncVendorVersion($slug, $version, $dryRun); + } else { + $this->warn(" Could not read version for {$slug} at {$productPath}"); + } + } + + // Sync plugin versions — read from biolinks as canonical source + $biolinkPluginsPath = rtrim($basePath, '/') . '/66biolinks/package/product/plugins'; + if (is_dir($biolinkPluginsPath)) { + foreach (glob($biolinkPluginsPath . 
'/*/config.php') as $configFile) { + $pluginId = basename(dirname($configFile)); + $version = $this->readPluginVersion($configFile); + + if ($version) { + $slug = "altum-plugin-{$pluginId}"; + $updates[] = $this->syncVendorVersion($slug, $version, $dryRun); + } + } + } + + // Output table + $this->table( + ['Vendor', 'Old Version', 'New Version', 'Status'], + array_filter($updates) + ); + + if ($dryRun) { + $this->warn('Dry run — no changes written.'); + } + + return self::SUCCESS; + } + + protected function readProductVersion(string $productPath): ?string + { + // Read version from app/init.php or similar — look for PRODUCT_VERSION define + $initFile = $productPath . '/app/init.php'; + if (! file_exists($initFile)) { + return null; + } + + $content = file_get_contents($initFile); + if (preg_match("/define\('PRODUCT_VERSION',\s*'([^']+)'\)/", $content, $matches)) { + return $matches[1]; + } + + return null; + } + + protected function readPluginVersion(string $configFile): ?string + { + if (! file_exists($configFile)) { + return null; + } + + $content = file_get_contents($configFile); + + // PHP config format: 'version' => '2.0.0' + if (preg_match("/'version'\s*=>\s*'([^']+)'/", $content, $matches)) { + return $matches[1]; + } + + return null; + } + + protected function syncVendorVersion(string $slug, string $version, bool $dryRun): ?array + { + $vendor = Vendor::where('slug', $slug)->first(); + if (! $vendor) { + return [$slug, '(not in DB)', $version, 'SKIPPED']; + } + + $oldVersion = $vendor->current_version; + if ($oldVersion === $version) { + return [$slug, $oldVersion, $version, 'current']; + } + + if (! $dryRun) { + $vendor->update(['current_version' => $version]); + } + + return [$slug, $oldVersion ?? '(none)', $version, $dryRun ? 
'WOULD UPDATE' : 'UPDATED']; + } +} +``` + +Register in Boot.php — add to `onConsole()`: + +```php +$event->command(Console\SyncAltumVersionsCommand::class); +``` + +**Step 4: Run test to verify it passes** + +Run: `cd /Users/snider/Code/core/php-uptelligence && composer test -- --filter=SyncAltumVersions` +Expected: PASS + +**Step 5: Commit** + +```bash +cd /Users/snider/Code/core/php-uptelligence +git add Console/SyncAltumVersionsCommand.php Boot.php tests/Unit/SyncAltumVersionsCommandTest.php +git commit -m "feat: sync deployed AltumCode versions from source files + +Reads PRODUCT_VERSION from product init.php and plugin versions from +config.php files. Updates uptelligence_vendors table so check-updates +knows what's actually deployed. + +Co-Authored-By: Virgil " +``` + +--- + +### Task 5: End-to-end verification + +**Step 1: Seed vendors on local dev** + +```bash +cd /Users/snider/Code/lab/host.uk.com +php artisan db:seed --class="Core\Mod\Uptelligence\Database\Seeders\AltumCodeVendorSeeder" +``` + +**Step 2: Sync actual deployed versions** + +```bash +php artisan uptelligence:sync-altum-versions --path=/Users/snider/Code/lthn/saas/services +``` + +**Step 3: Run the update check** + +```bash +php artisan uptelligence:check-updates +``` + +Expected: Table showing current vs latest versions for all 17 AltumCode vendors. + +**Step 4: Test the skill** + +Open a new Claude Code session and run `/update-altum` to verify the skill loads and shows the workflow. + +**Step 5: Commit any fixes** + +```bash +git add -A && git commit -m "fix: adjustments from end-to-end testing" +``` diff --git a/docs/primitives.md b/docs/primitives.md new file mode 100644 index 0000000..43701f2 --- /dev/null +++ b/docs/primitives.md @@ -0,0 +1,169 @@ +--- +title: Core Primitives +description: The repeated shapes that make CoreGO easy to navigate. +--- + +# Core Primitives + +CoreGO is easiest to use when you read it as a small vocabulary repeated everywhere. 
Most of the framework is built from the same handful of types. + +## Primitive Map + +| Type | Used For | +|------|----------| +| `Options` | Input values and lightweight metadata | +| `Result` | Output values and success state | +| `Service` | Lifecycle-managed components | +| `Message` | Broadcast events | +| `Query` | Request-response lookups | +| `Task` | Side-effecting work items | + +## `Option` and `Options` + +`Option` is one key-value pair. `Options` is an ordered slice of them. + +```go +opts := core.Options{ + {Key: "name", Value: "brain"}, + {Key: "path", Value: "prompts"}, + {Key: "debug", Value: true}, +} +``` + +Use the helpers to read values: + +```go +name := opts.String("name") +path := opts.String("path") +debug := opts.Bool("debug") +hasPath := opts.Has("path") +raw := opts.Get("name") +``` + +### Important Details + +- `Get` returns the first matching key. +- `String`, `Int`, and `Bool` do not convert between types. +- Missing keys return zero values. +- CLI flags with values are stored as strings, so `--port=8080` should be read with `opts.String("port")`, not `opts.Int("port")`. + +## `Result` + +`Result` is the universal return shape. + +```go +r := core.Result{Value: "ready", OK: true} + +if r.OK { + fmt.Println(r.Value) +} +``` + +It has two jobs: + +- carry a value when work succeeds +- carry either an error or an empty state when work does not succeed + +### `Result.Result(...)` + +The `Result()` method adapts plain Go values and `(value, error)` pairs into a `core.Result`. + +```go +r1 := core.Result{}.Result("hello") +r2 := core.Result{}.Result(file, err) +``` + +This is how several built-in helpers bridge standard-library calls. + +## `Service` + +`Service` is the managed lifecycle DTO stored in the registry. 
+ +```go +svc := core.Service{ + Name: "cache", + Options: core.Options{ + {Key: "backend", Value: "memory"}, + }, + OnStart: func() core.Result { + return core.Result{OK: true} + }, + OnStop: func() core.Result { + return core.Result{OK: true} + }, + OnReload: func() core.Result { + return core.Result{OK: true} + }, +} +``` + +### Important Details + +- `OnStart` and `OnStop` are used by the framework lifecycle. +- `OnReload` is stored on the service DTO, but CoreGO does not currently call it automatically. +- The registry stores `*core.Service`, not arbitrary typed service instances. + +## `Message`, `Query`, and `Task` + +These are simple aliases to `any`. + +```go +type Message any +type Query any +type Task any +``` + +That means your own structs become the protocol: + +```go +type deployStarted struct { + Environment string +} + +type workspaceCountQuery struct{} + +type syncRepositoryTask struct { + Name string +} +``` + +## `TaskWithIdentifier` + +Long-running tasks can opt into task identifiers. + +```go +type indexedTask struct { + ID string +} + +func (t *indexedTask) SetTaskIdentifier(id string) { t.ID = id } +func (t *indexedTask) GetTaskIdentifier() string { return t.ID } +``` + +If a task implements `TaskWithIdentifier`, `PerformAsync` injects the generated `task-N` identifier before dispatch. + +## `ServiceRuntime[T]` + +`ServiceRuntime[T]` is the small helper for packages that want to keep a Core reference and a typed options struct together. + +```go +type agentServiceOptions struct { + WorkspacePath string +} + +type agentService struct { + *core.ServiceRuntime[agentServiceOptions] +} + +runtime := core.NewServiceRuntime(c, agentServiceOptions{ + WorkspacePath: "/srv/agent-workspaces", +}) +``` + +It exposes: + +- `Core()` +- `Options()` +- `Config()` + +This helper does not register anything by itself. It is a composition aid for package authors. 
diff --git a/docs/services.md b/docs/services.md new file mode 100644 index 0000000..ad95d64 --- /dev/null +++ b/docs/services.md @@ -0,0 +1,152 @@ +--- +title: Services +description: Register, inspect, and lock CoreGO services. +--- + +# Services + +In CoreGO, a service is a named lifecycle entry stored in the Core registry. + +## Register a Service + +```go +c := core.New() + +r := c.Service("audit", core.Service{ + OnStart: func() core.Result { + core.Info("audit started") + return core.Result{OK: true} + }, + OnStop: func() core.Result { + core.Info("audit stopped") + return core.Result{OK: true} + }, +}) +``` + +Registration succeeds when: + +- the name is not empty +- the registry is not locked +- the name is not already in use + +## Read a Service Back + +```go +r := c.Service("audit") +if r.OK { + svc := r.Value.(*core.Service) + _ = svc +} +``` + +The returned value is `*core.Service`. + +## List Registered Services + +```go +names := c.Services() +``` + +### Important Detail + +The current registry is map-backed. `Services()`, `Startables()`, and `Stoppables()` do not promise a stable order. + +## Lifecycle Snapshots + +Use these helpers when you want the current set of startable or stoppable services: + +```go +startables := c.Startables() +stoppables := c.Stoppables() +``` + +They return `[]*core.Service` inside `Result.Value`. + +## Lock the Registry + +CoreGO has a service-lock mechanism, but it is explicit. + +```go +c := core.New() + +c.LockEnable() +c.Service("audit", core.Service{}) +c.Service("cache", core.Service{}) +c.LockApply() +``` + +After `LockApply`, new registrations fail: + +```go +r := c.Service("late", core.Service{}) +fmt.Println(r.OK) // false +``` + +The default lock name is `"srv"`. You can pass a different name if you need a custom lock namespace. + +For the service registry itself, use the default `"srv"` lock path. That is the path used by `Core.Service(...)`. 
+ +## `NewWithFactories` + +For GUI runtimes or factory-driven setup, CoreGO provides `NewWithFactories`. + +```go +r := core.NewWithFactories(nil, map[string]core.ServiceFactory{ + "audit": func() core.Result { + return core.Result{Value: core.Service{ + OnStart: func() core.Result { + return core.Result{OK: true} + }, + }, OK: true} + }, + "cache": func() core.Result { + return core.Result{Value: core.Service{}, OK: true} + }, +}) +``` + +### Important Details + +- each factory must return a `core.Service` in `Result.Value` +- factories are executed in sorted key order +- nil factories are skipped +- the return value is `*core.Runtime` + +## `Runtime` + +`Runtime` is a small wrapper used for external runtimes such as GUI bindings. + +```go +r := core.NewRuntime(nil) +rt := r.Value.(*core.Runtime) + +_ = rt.ServiceStartup(context.Background(), nil) +_ = rt.ServiceShutdown(context.Background()) +``` + +`Runtime.ServiceName()` returns `"Core"`. + +## `ServiceRuntime[T]` for Package Authors + +If you are writing a package on top of CoreGO, use `ServiceRuntime[T]` to keep a typed options struct and the parent `Core` together. + +```go +type repositoryServiceOptions struct { + BaseDirectory string +} + +type repositoryService struct { + *core.ServiceRuntime[repositoryServiceOptions] +} + +func newRepositoryService(c *core.Core) *repositoryService { + return &repositoryService{ + ServiceRuntime: core.NewServiceRuntime(c, repositoryServiceOptions{ + BaseDirectory: "/srv/repos", + }), + } +} +``` + +This is a package-authoring helper. It does not replace the `core.Service` registry entry. diff --git a/docs/skill/index.md b/docs/skill/index.md deleted file mode 100644 index 40ae3ad..0000000 --- a/docs/skill/index.md +++ /dev/null @@ -1,35 +0,0 @@ -# Claude Code Skill - -The `core` skill teaches Claude Code how to use the Core CLI effectively. 
- -## Installation - -```bash -curl -fsSL https://raw.githubusercontent.com/host-uk/core/main/.claude/skills/core/install.sh | bash -``` - -Or if you have the repo cloned: - -```bash -./.claude/skills/core/install.sh -``` - -## What it does - -Once installed, Claude Code will: - -- Auto-invoke when working in host-uk repositories -- Use `core` commands instead of raw `go`/`php`/`git` commands -- Follow the correct patterns for testing, building, and releasing - -## Manual invocation - -Type `/core` in Claude Code to invoke the skill manually. - -## Updating - -Re-run the install command to update to the latest version. - -## Location - -Skills are installed to `~/.claude/skills/core/SKILL.md`. diff --git a/docs/subsystems.md b/docs/subsystems.md new file mode 100644 index 0000000..f39ea16 --- /dev/null +++ b/docs/subsystems.md @@ -0,0 +1,158 @@ +--- +title: Subsystems +description: Built-in accessors for app metadata, embedded data, filesystem, transport handles, i18n, and CLI. +--- + +# Subsystems + +`Core` gives you a set of built-in subsystems so small applications do not need extra plumbing before they can do useful work. + +## Accessor Map + +| Accessor | Purpose | +|----------|---------| +| `App()` | Application identity and external runtime | +| `Data()` | Named embedded filesystem mounts | +| `Drive()` | Named transport handles | +| `Fs()` | Local filesystem access | +| `I18n()` | Locale collection and translation delegation | +| `Cli()` | Command-line surface over the command tree | + +## `App` + +`App` stores process identity and optional GUI runtime state. + +```go +app := c.App() +app.Name = "agent-workbench" +app.Version = "0.25.0" +app.Description = "workspace runner" +app.Runtime = myRuntime +``` + +`Find` resolves an executable on `PATH` and returns an `*App`. + +```go +r := core.Find("go", "Go toolchain") +``` + +## `Data` + +`Data` mounts named embedded filesystems and makes them addressable through paths like `mount-name/path/to/file`. 
+ +```go +c.Data().New(core.Options{ + {Key: "name", Value: "app"}, + {Key: "source", Value: appFS}, + {Key: "path", Value: "templates"}, +}) +``` + +Read content: + +```go +text := c.Data().ReadString("app/agent.md") +bytes := c.Data().ReadFile("app/agent.md") +list := c.Data().List("app") +names := c.Data().ListNames("app") +``` + +Extract a mounted directory: + +```go +r := c.Data().Extract("app/workspace", "/tmp/workspace", nil) +``` + +### Path Rule + +The first path segment is always the mount name. + +## `Drive` + +`Drive` is a registry for named transport handles. + +```go +c.Drive().New(core.Options{ + {Key: "name", Value: "api"}, + {Key: "transport", Value: "https://api.lthn.ai"}, +}) + +c.Drive().New(core.Options{ + {Key: "name", Value: "mcp"}, + {Key: "transport", Value: "mcp://mcp.lthn.sh"}, +}) +``` + +Read them back: + +```go +handle := c.Drive().Get("api") +hasMCP := c.Drive().Has("mcp") +names := c.Drive().Names() +``` + +## `Fs` + +`Fs` wraps local filesystem operations with a consistent `Result` shape. + +```go +c.Fs().Write("/tmp/core-go/example.txt", "hello") +r := c.Fs().Read("/tmp/core-go/example.txt") +``` + +Other helpers: + +```go +c.Fs().EnsureDir("/tmp/core-go/cache") +c.Fs().List("/tmp/core-go") +c.Fs().Stat("/tmp/core-go/example.txt") +c.Fs().Rename("/tmp/core-go/example.txt", "/tmp/core-go/example-2.txt") +c.Fs().Delete("/tmp/core-go/example-2.txt") +``` + +### Important Details + +- the default `Core` starts with `Fs{root:"/"}` +- relative paths resolve from the current working directory +- `Delete` and `DeleteAll` refuse to remove `/` and `$HOME` + +## `I18n` + +`I18n` collects locale mounts and forwards translation work to a translator implementation when one is registered. 
+ +```go +c.I18n().SetLanguage("en-GB") +``` + +Without a translator, `Translate` returns the message key itself: + +```go +r := c.I18n().Translate("cmd.deploy.description") +``` + +With a translator: + +```go +c.I18n().SetTranslator(myTranslator) +``` + +Then: + +```go +langs := c.I18n().AvailableLanguages() +current := c.I18n().Language() +``` + +## `Cli` + +`Cli` exposes the command registry through a terminal-facing API. + +```go +c.Cli().SetBanner(func(_ *core.Cli) string { + return "Agent Workbench" +}) + +r := c.Cli().Run("workspace", "create", "--name=alpha") +``` + +Use [commands.md](commands.md) for the full command and flag model. diff --git a/docs/testing.md b/docs/testing.md new file mode 100644 index 0000000..656634a --- /dev/null +++ b/docs/testing.md @@ -0,0 +1,118 @@ +--- +title: Testing +description: Test naming and testing patterns used by CoreGO. +--- + +# Testing + +The repository uses `github.com/stretchr/testify/assert` and a simple AX-friendly naming pattern. + +## Test Names + +Use: + +- `_Good` for expected success +- `_Bad` for expected failure +- `_Ugly` for panics, degenerate input, and edge behavior + +Examples from this repository: + +```go +func TestNew_Good(t *testing.T) {} +func TestService_Register_Duplicate_Bad(t *testing.T) {} +func TestCore_Must_Ugly(t *testing.T) {} +``` + +## Start with a Small Core + +```go +c := core.New(core.Options{ + {Key: "name", Value: "test-core"}, +}) +``` + +Then register only the pieces your test needs. 
+ +## Test a Service + +```go +started := false + +c.Service("audit", core.Service{ + OnStart: func() core.Result { + started = true + return core.Result{OK: true} + }, +}) + +r := c.ServiceStartup(context.Background(), nil) +assert.True(t, r.OK) +assert.True(t, started) +``` + +## Test a Command + +```go +c.Command("greet", core.Command{ + Action: func(opts core.Options) core.Result { + return core.Result{Value: "hello " + opts.String("name"), OK: true} + }, +}) + +r := c.Cli().Run("greet", "--name=world") +assert.True(t, r.OK) +assert.Equal(t, "hello world", r.Value) +``` + +## Test a Query or Task + +```go +c.RegisterQuery(func(_ *core.Core, q core.Query) core.Result { + if q == "ping" { + return core.Result{Value: "pong", OK: true} + } + return core.Result{} +}) + +assert.Equal(t, "pong", c.QUERY("ping").Value) +``` + +```go +c.RegisterTask(func(_ *core.Core, t core.Task) core.Result { + if t == "compute" { + return core.Result{Value: 42, OK: true} + } + return core.Result{} +}) + +assert.Equal(t, 42, c.PERFORM("compute").Value) +``` + +## Test Async Work + +For `PerformAsync`, observe completion through the action bus. + +```go +completed := make(chan core.ActionTaskCompleted, 1) + +c.RegisterAction(func(_ *core.Core, msg core.Message) core.Result { + if event, ok := msg.(core.ActionTaskCompleted); ok { + completed <- event + } + return core.Result{OK: true} +}) +``` + +Then wait with normal Go test tools such as channels, timers, or `assert.Eventually`. + +## Use Real Temporary Paths + +When testing `Fs`, `Data.Extract`, or other I/O helpers, use `t.TempDir()` and create realistic paths instead of mocking the filesystem by default. + +## Repository Commands + +```bash +core go test +core go test --run TestPerformAsync_Good +go test ./... 
+``` diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md deleted file mode 100644 index c075f3a..0000000 --- a/docs/troubleshooting.md +++ /dev/null @@ -1,332 +0,0 @@ -# Troubleshooting - -Common issues and how to resolve them. - -## Installation Issues - -### "command not found: core" - -**Cause:** Go's bin directory is not in your PATH. - -**Fix:** - -```bash -# Add to ~/.bashrc or ~/.zshrc -export PATH="$PATH:$(go env GOPATH)/bin" - -# Then reload -source ~/.bashrc # or ~/.zshrc -``` - -### "go: module github.com/host-uk/core: no matching versions" - -**Cause:** Go module proxy hasn't cached the latest version yet. - -**Fix:** - -```bash -# Bypass proxy -GOPROXY=direct go install github.com/host-uk/core/cmd/core@latest -``` - ---- - -## Build Issues - -### "no Go files in..." - -**Cause:** Core couldn't find a main package to build. - -**Fix:** - -1. Check you're in the correct directory -2. Ensure `.core/build.yaml` has the correct `main` path: - -```yaml -project: - main: ./cmd/myapp # Path to main package -``` - -### "CGO_ENABLED=1 but no C compiler" - -**Cause:** Build requires CGO but no C compiler is available. - -**Fix:** - -```bash -# Option 1: Disable CGO (if not needed) -core build # Core disables CGO by default - -# Option 2: Install a C compiler -# macOS -xcode-select --install - -# Ubuntu/Debian -sudo apt install build-essential - -# Windows -# Install MinGW or use WSL -``` - -### Build succeeds but binary doesn't run - -**Cause:** Built for wrong architecture. - -**Fix:** - -```bash -# Check what you built -file dist/myapp-* - -# Build for your current platform -core build --targets $(go env GOOS)/$(go env GOARCH) -``` - ---- - -## Release Issues - -### "dry-run mode, use --we-are-go-for-launch to publish" - -**This is expected behaviour.** Core runs in dry-run mode by default for safety. 
- -**To actually publish:** - -```bash -core ci --we-are-go-for-launch -``` - -### "failed to create release: 401 Unauthorized" - -**Cause:** GitHub token missing or invalid. - -**Fix:** - -```bash -# Authenticate with GitHub CLI -gh auth login - -# Or set token directly -export GITHUB_TOKEN=ghp_xxxxxxxxxxxx -``` - -### "no artifacts found in dist/" - -**Cause:** You need to build before releasing. - -**Fix:** - -```bash -# Build first -core build - -# Then release -core ci --we-are-go-for-launch -``` - -### "tag already exists" - -**Cause:** Trying to release a version that's already been released. - -**Fix:** - -1. Update version in your code/config -2. Or delete the existing tag (if intentional): - -```bash -git tag -d v1.0.0 -git push origin :refs/tags/v1.0.0 -``` - ---- - -## Multi-Repo Issues - -### "repos.yaml not found" - -**Cause:** Core can't find the package registry. - -**Fix:** - -Core looks for `repos.yaml` in: -1. Current directory -2. Parent directories (walking up to root) -3. `~/Code/host-uk/repos.yaml` -4. `~/.config/core/repos.yaml` - -Either: -- Run commands from a directory with `repos.yaml` -- Use `--registry /path/to/repos.yaml` -- Run `core setup` to bootstrap a new workspace - -### "failed to clone: Permission denied (publickey)" - -**Cause:** SSH key not configured for GitHub. - -**Fix:** - -```bash -# Check SSH connection -ssh -T git@github.com - -# If that fails, add your key -ssh-add ~/.ssh/id_ed25519 - -# Or configure SSH -# See: https://docs.github.com/en/authentication/connecting-to-github-with-ssh -``` - -### "repository not found" during setup - -**Cause:** You don't have access to the repository, or it doesn't exist. - -**Fix:** - -1. Check you're authenticated: `gh auth status` -2. Verify the repo exists and you have access -3. For private repos, ensure your token has `repo` scope - ---- - -## GitHub Integration Issues - -### "gh: command not found" - -**Cause:** GitHub CLI not installed. 
- -**Fix:** - -```bash -# macOS -brew install gh - -# Ubuntu/Debian -sudo apt install gh - -# Windows -winget install GitHub.cli - -# Then authenticate -gh auth login -``` - -### "core dev issues" shows nothing - -**Possible causes:** - -1. No open issues exist -2. Not authenticated with GitHub -3. Not in a directory with `repos.yaml` - -**Fix:** - -```bash -# Check auth -gh auth status - -# Check you're in a workspace -ls repos.yaml - -# Show all issues including closed -core dev issues --all -``` - ---- - -## PHP Issues - -### "frankenphp: command not found" - -**Cause:** FrankenPHP not installed. - -**Fix:** - -```bash -# macOS -brew install frankenphp - -# Or use Docker -core php dev --docker -``` - -### "core php dev" exits immediately - -**Cause:** Usually a port conflict or missing dependency. - -**Fix:** - -```bash -# Check if port 8000 is in use -lsof -i :8000 - -# Try a different port -core php dev --port 9000 - -# Check logs for errors -core php logs -``` - ---- - -## Performance Issues - -### Commands are slow - -**Possible causes:** - -1. Large number of repositories -2. Network latency to GitHub -3. Go module downloads - -**Fix:** - -```bash -# For multi-repo commands, use health for quick check -core dev health # Fast summary - -# Instead of -core dev work --status # Full table (slower) - -# Pre-download Go modules -go mod download -``` - ---- - -## Getting More Help - -### Enable Verbose Output - -Most commands support `-v` or `--verbose`: - -```bash -core build -v -core go test -v -``` - -### Check Environment - -```bash -core doctor -``` - -This verifies all required tools are installed and configured. - -### Report Issues - -If you've found a bug: - -1. Check existing issues: https://github.com/host-uk/core/issues -2. 
Create a new issue with: - - Core version (`core --version`) - - OS and architecture (`go env GOOS GOARCH`) - - Command that failed - - Full error output - ---- - -## See Also - -- [Getting Started](getting-started.md) - Installation and first steps -- [Configuration](configuration.md) - Config file reference -- [doctor](cmd/doctor/) - Environment verification diff --git a/docs/workflows.md b/docs/workflows.md deleted file mode 100644 index 96b0c9f..0000000 --- a/docs/workflows.md +++ /dev/null @@ -1,334 +0,0 @@ -# Workflows - -Common end-to-end workflows for Core CLI. - -## Go Project: Build and Release - -Complete workflow from code to GitHub release. - -```bash -# 1. Run tests -core go test - -# 2. Check coverage -core go cov --threshold 80 - -# 3. Format and lint -core go fmt --fix -core go lint - -# 4. Build for all platforms -core build --targets linux/amd64,linux/arm64,darwin/arm64,windows/amd64 - -# 5. Preview release (dry-run) -core ci - -# 6. Publish -core ci --we-are-go-for-launch -``` - -**Output structure:** - -``` -dist/ -├── myapp-darwin-arm64.tar.gz -├── myapp-linux-amd64.tar.gz -├── myapp-linux-arm64.tar.gz -├── myapp-windows-amd64.zip -└── CHECKSUMS.txt -``` - ---- - -## PHP Project: Development to Deployment - -Local development through to production deployment. - -```bash -# 1. Start development environment -core php dev - -# 2. Run tests (in another terminal) -core php test --parallel - -# 3. Check code quality -core php fmt --fix -core php analyse - -# 4. Deploy to staging -core php deploy --staging --wait - -# 5. Verify staging -# (manual testing) - -# 6. Deploy to production -core php deploy --wait - -# 7. Monitor -core php deploy:status -``` - -**Rollback if needed:** - -```bash -core php deploy:rollback -``` - ---- - -## Multi-Repo: Daily Workflow - -Working across the host-uk monorepo. 
- -### Morning: Sync Everything - -```bash -# Quick health check -core dev health - -# Pull all repos that are behind -core dev pull --all - -# Check for issues assigned to you -core dev issues --assignee @me -``` - -### During Development - -```bash -# Work on code... - -# Check status across all repos -core dev work --status - -# Commit changes (Claude-assisted messages) -core dev commit - -# Push when ready -core dev push -``` - -### End of Day - -```bash -# Full workflow: status → commit → push -core dev work - -# Check CI status -core dev ci - -# Review any failed builds -core dev ci --failed -``` - ---- - -## New Developer: Environment Setup - -First-time setup for a new team member. - -```bash -# 1. Verify prerequisites -core doctor - -# 2. Create workspace directory -mkdir ~/Code/host-uk && cd ~/Code/host-uk - -# 3. Bootstrap workspace (interactive) -core setup - -# 4. Select packages in wizard -# Use arrow keys, space to select, enter to confirm - -# 5. Verify setup -core dev health - -# 6. Start working -core dev work --status -``` - ---- - -## CI Pipeline: Automated Build - -Example GitHub Actions workflow. - -```yaml -# .github/workflows/release.yml -name: Release - -on: - push: - tags: - - 'v*' - -jobs: - release: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - uses: actions/setup-go@v5 - with: - go-version: '1.23' - - - name: Install Core - run: go install github.com/host-uk/core/cmd/core@latest - - - name: Build - run: core build --ci - - - name: Release - run: core ci --we-are-go-for-launch - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} -``` - ---- - -## SDK Generation: API Client Updates - -Generate SDK clients when API changes. - -```bash -# 1. Validate OpenAPI spec -core sdk validate - -# 2. Check for breaking changes -core sdk diff --base v1.0.0 - -# 3. Generate SDKs -core build sdk - -# 4. Review generated code -git diff - -# 5. 
Commit if satisfied -git add -A && git commit -m "chore: regenerate SDK clients" -``` - ---- - -## Dependency Update: Cross-Repo Change - -When updating a shared package like `core-php`. - -```bash -# 1. Make changes in core-php -cd ~/Code/host-uk/core-php -# ... edit code ... - -# 2. Run tests -core go test # or core php test - -# 3. Check what depends on core-php -core dev impact core-php - -# Output: -# core-tenant (direct) -# core-admin (via core-tenant) -# core-api (direct) -# ... - -# 4. Commit core-php changes -core dev commit - -# 5. Update dependent packages -cd ~/Code/host-uk -for pkg in core-tenant core-admin core-api; do - cd $pkg - composer update host-uk/core-php - core php test - cd .. -done - -# 6. Commit all updates -core dev work -``` - ---- - -## Hotfix: Emergency Production Fix - -Fast path for critical fixes. - -```bash -# 1. Create hotfix branch -git checkout -b hotfix/critical-bug main - -# 2. Make fix -# ... edit code ... - -# 3. Test -core go test --run TestCriticalPath - -# 4. Build -core build - -# 5. Preview release -core ci --prerelease - -# 6. Publish hotfix -core ci --we-are-go-for-launch --prerelease - -# 7. Merge back to main -git checkout main -git merge hotfix/critical-bug -git push -``` - ---- - -## Documentation: Sync Across Repos - -Keep documentation synchronised. - -```bash -# 1. List all docs -core docs list - -# 2. Sync to central location -core docs sync --output ./docs-site - -# 3. Review changes -git diff docs-site/ - -# 4. Commit -git add docs-site/ -git commit -m "docs: sync from packages" -``` - ---- - -## Troubleshooting: Failed Build - -When a build fails. - -```bash -# 1. Check environment -core doctor - -# 2. Clean previous artifacts -rm -rf dist/ - -# 3. Verbose build -core build -v - -# 4. If Go-specific issues -core go mod tidy -core go mod verify - -# 5. Check for test failures -core go test -v - -# 6. 
Review configuration -cat .core/build.yaml -``` - ---- - -## See Also - -- [Getting Started](getting-started.md) - First-time setup -- [Troubleshooting](troubleshooting.md) - When things go wrong -- [Configuration](configuration.md) - Config file reference diff --git a/drive.go b/drive.go new file mode 100644 index 0000000..e6988c4 --- /dev/null +++ b/drive.go @@ -0,0 +1,112 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// Drive is the resource handle registry for transport connections. +// Packages register their transport handles (API, MCP, SSH, VPN) +// and other packages access them by name. +// +// Register a transport: +// +// c.Drive().New(core.Options{ +// {Key: "name", Value: "api"}, +// {Key: "transport", Value: "https://api.lthn.ai"}, +// }) +// c.Drive().New(core.Options{ +// {Key: "name", Value: "ssh"}, +// {Key: "transport", Value: "ssh://claude@10.69.69.165"}, +// }) +// c.Drive().New(core.Options{ +// {Key: "name", Value: "mcp"}, +// {Key: "transport", Value: "mcp://mcp.lthn.sh"}, +// }) +// +// Retrieve a handle: +// +// api := c.Drive().Get("api") +package core + +import ( + "sync" +) + +// DriveHandle holds a named transport resource. +type DriveHandle struct { + Name string + Transport string + Options Options +} + +// Drive manages named transport handles. +type Drive struct { + handles map[string]*DriveHandle + mu sync.RWMutex +} + +// New registers a transport handle. 
+// +// c.Drive().New(core.Options{ +// {Key: "name", Value: "api"}, +// {Key: "transport", Value: "https://api.lthn.ai"}, +// }) +func (d *Drive) New(opts Options) Result { + name := opts.String("name") + if name == "" { + return Result{} + } + + transport := opts.String("transport") + + d.mu.Lock() + defer d.mu.Unlock() + + if d.handles == nil { + d.handles = make(map[string]*DriveHandle) + } + + cp := make(Options, len(opts)) + copy(cp, opts) + handle := &DriveHandle{ + Name: name, + Transport: transport, + Options: cp, + } + + d.handles[name] = handle + return Result{handle, true} +} + +// Get returns a handle by name. +// +// r := c.Drive().Get("api") +// if r.OK { handle := r.Value.(*DriveHandle) } +func (d *Drive) Get(name string) Result { + d.mu.RLock() + defer d.mu.RUnlock() + if d.handles == nil { + return Result{} + } + h, ok := d.handles[name] + if !ok { + return Result{} + } + return Result{h, true} +} + +// Has returns true if a handle is registered. +// +// if c.Drive().Has("ssh") { ... } +func (d *Drive) Has(name string) bool { + return d.Get(name).OK +} + +// Names returns all registered handle names. +// +// names := c.Drive().Names() +func (d *Drive) Names() []string { + d.mu.RLock() + defer d.mu.RUnlock() + var names []string + for k := range d.handles { + names = append(names, k) + } + return names +} diff --git a/drive_test.go b/drive_test.go new file mode 100644 index 0000000..afd6a34 --- /dev/null +++ b/drive_test.go @@ -0,0 +1,80 @@ +package core_test + +import ( + "testing" + + . 
"dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- Drive (Transport Handles) --- + +func TestDrive_New_Good(t *testing.T) { + c := New().Value.(*Core) + r := c.Drive().New(Options{ + {Key: "name", Value: "api"}, + {Key: "transport", Value: "https://api.lthn.ai"}, + }) + assert.True(t, r.OK) + assert.Equal(t, "api", r.Value.(*DriveHandle).Name) + assert.Equal(t, "https://api.lthn.ai", r.Value.(*DriveHandle).Transport) +} + +func TestDrive_New_Bad(t *testing.T) { + c := New().Value.(*Core) + // Missing name + r := c.Drive().New(Options{ + {Key: "transport", Value: "https://api.lthn.ai"}, + }) + assert.False(t, r.OK) +} + +func TestDrive_Get_Good(t *testing.T) { + c := New().Value.(*Core) + c.Drive().New(Options{ + {Key: "name", Value: "ssh"}, + {Key: "transport", Value: "ssh://claude@10.69.69.165"}, + }) + r := c.Drive().Get("ssh") + assert.True(t, r.OK) + handle := r.Value.(*DriveHandle) + assert.Equal(t, "ssh://claude@10.69.69.165", handle.Transport) +} + +func TestDrive_Get_Bad(t *testing.T) { + c := New().Value.(*Core) + r := c.Drive().Get("nonexistent") + assert.False(t, r.OK) +} + +func TestDrive_Has_Good(t *testing.T) { + c := New().Value.(*Core) + c.Drive().New(Options{{Key: "name", Value: "mcp"}, {Key: "transport", Value: "mcp://mcp.lthn.sh"}}) + assert.True(t, c.Drive().Has("mcp")) + assert.False(t, c.Drive().Has("missing")) +} + +func TestDrive_Names_Good(t *testing.T) { + c := New().Value.(*Core) + c.Drive().New(Options{{Key: "name", Value: "api"}, {Key: "transport", Value: "https://api.lthn.ai"}}) + c.Drive().New(Options{{Key: "name", Value: "ssh"}, {Key: "transport", Value: "ssh://claude@10.69.69.165"}}) + c.Drive().New(Options{{Key: "name", Value: "mcp"}, {Key: "transport", Value: "mcp://mcp.lthn.sh"}}) + names := c.Drive().Names() + assert.Len(t, names, 3) + assert.Contains(t, names, "api") + assert.Contains(t, names, "ssh") + assert.Contains(t, names, "mcp") +} + +func TestDrive_OptionsPreserved_Good(t *testing.T) { + c := 
New().Value.(*Core) + c.Drive().New(Options{ + {Key: "name", Value: "api"}, + {Key: "transport", Value: "https://api.lthn.ai"}, + {Key: "timeout", Value: 30}, + }) + r := c.Drive().Get("api") + assert.True(t, r.OK) + handle := r.Value.(*DriveHandle) + assert.Equal(t, 30, handle.Options.Int("timeout")) +} diff --git a/embed.go b/embed.go new file mode 100644 index 0000000..e6a5766 --- /dev/null +++ b/embed.go @@ -0,0 +1,668 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// Embedded assets for the Core framework. +// +// Embed provides scoped filesystem access for go:embed and any fs.FS. +// Also includes build-time asset packing (AST scanner + compressor) +// and template-based directory extraction. +// +// Usage (mount): +// +// sub, _ := core.Mount(myFS, "lib/persona") +// content, _ := sub.ReadString("secops/developer.md") +// +// Usage (extract): +// +// core.Extract(fsys, "/tmp/workspace", data) +// +// Usage (pack): +// +// refs, _ := core.ScanAssets([]string{"main.go"}) +// source, _ := core.GeneratePack(refs) +package core + +import ( + "bytes" + "compress/gzip" + "embed" + "encoding/base64" + "fmt" + "go/ast" + "go/parser" + "go/token" + "io" + "io/fs" + "os" + "path/filepath" + "sync" + "text/template" +) + +// --- Runtime: Asset Registry --- + +// AssetGroup holds a named collection of packed assets. +type AssetGroup struct { + assets map[string]string // name → compressed data +} + +var ( + assetGroups = make(map[string]*AssetGroup) + assetGroupsMu sync.RWMutex +) + +// AddAsset registers a packed asset at runtime (called from generated init()). +func AddAsset(group, name, data string) { + assetGroupsMu.Lock() + defer assetGroupsMu.Unlock() + + g, ok := assetGroups[group] + if !ok { + g = &AssetGroup{assets: make(map[string]string)} + assetGroups[group] = g + } + g.assets[name] = data +} + +// GetAsset retrieves and decompresses a packed asset. 
+// +// r := core.GetAsset("mygroup", "greeting") +// if r.OK { content := r.Value.(string) } +func GetAsset(group, name string) Result { + assetGroupsMu.RLock() + g, ok := assetGroups[group] + if !ok { + assetGroupsMu.RUnlock() + return Result{} + } + data, ok := g.assets[name] + assetGroupsMu.RUnlock() + if !ok { + return Result{} + } + s, err := decompress(data) + if err != nil { + return Result{err, false} + } + return Result{s, true} +} + +// GetAssetBytes retrieves a packed asset as bytes. +// +// r := core.GetAssetBytes("mygroup", "file") +// if r.OK { data := r.Value.([]byte) } +func GetAssetBytes(group, name string) Result { + r := GetAsset(group, name) + if !r.OK { + return r + } + return Result{[]byte(r.Value.(string)), true} +} + +// --- Build-time: AST Scanner --- + +// AssetRef is a reference to an asset found in source code. +type AssetRef struct { + Name string + Path string + Group string + FullPath string +} + +// ScannedPackage holds all asset references from a set of source files. +type ScannedPackage struct { + PackageName string + BaseDirectory string + Groups []string + Assets []AssetRef +} + +// ScanAssets parses Go source files and finds asset references. +// Looks for calls to: core.GetAsset("group", "name"), core.AddAsset, etc. 
+func ScanAssets(filenames []string) Result { + packageMap := make(map[string]*ScannedPackage) + var scanErr error + + for _, filename := range filenames { + fset := token.NewFileSet() + node, err := parser.ParseFile(fset, filename, nil, parser.AllErrors) + if err != nil { + return Result{err, false} + } + + baseDir := filepath.Dir(filename) + pkg, ok := packageMap[baseDir] + if !ok { + pkg = &ScannedPackage{BaseDirectory: baseDir} + packageMap[baseDir] = pkg + } + pkg.PackageName = node.Name.Name + + ast.Inspect(node, func(n ast.Node) bool { + if scanErr != nil { + return false + } + call, ok := n.(*ast.CallExpr) + if !ok { + return true + } + + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return true + } + + ident, ok := sel.X.(*ast.Ident) + if !ok { + return true + } + + // Look for core.GetAsset or mewn.String patterns + if ident.Name == "core" || ident.Name == "mewn" { + switch sel.Sel.Name { + case "GetAsset", "GetAssetBytes", "String", "MustString", "Bytes", "MustBytes": + if len(call.Args) >= 1 { + if lit, ok := call.Args[len(call.Args)-1].(*ast.BasicLit); ok { + path := TrimPrefix(TrimSuffix(lit.Value, "\""), "\"") + group := "." 
+ if len(call.Args) >= 2 { + if glit, ok := call.Args[0].(*ast.BasicLit); ok { + group = TrimPrefix(TrimSuffix(glit.Value, "\""), "\"") + } + } + fullPath, err := filepath.Abs(filepath.Join(baseDir, group, path)) + if err != nil { + scanErr = Wrap(err, "core.ScanAssets", Join(" ", "could not determine absolute path for asset", path, "in group", group)) + return false + } + pkg.Assets = append(pkg.Assets, AssetRef{ + Name: path, + + Group: group, + FullPath: fullPath, + }) + } + } + case "Group": + // Variable assignment: g := core.Group("./assets") + if len(call.Args) == 1 { + if lit, ok := call.Args[0].(*ast.BasicLit); ok { + path := TrimPrefix(TrimSuffix(lit.Value, "\""), "\"") + fullPath, err := filepath.Abs(filepath.Join(baseDir, path)) + if err != nil { + scanErr = Wrap(err, "core.ScanAssets", Join(" ", "could not determine absolute path for group", path)) + return false + } + pkg.Groups = append(pkg.Groups, fullPath) + // Track for variable resolution + } + } + } + } + + return true + }) + if scanErr != nil { + return Result{scanErr, false} + } + } + + var result []ScannedPackage + for _, pkg := range packageMap { + result = append(result, *pkg) + } + return Result{result, true} +} + +// GeneratePack creates Go source code that embeds the scanned assets. +func GeneratePack(pkg ScannedPackage) Result { + b := NewBuilder() + + b.WriteString(fmt.Sprintf("package %s\n\n", pkg.PackageName)) + b.WriteString("// Code generated by core pack. 
DO NOT EDIT.\n\n") + + if len(pkg.Assets) == 0 && len(pkg.Groups) == 0 { + return Result{b.String(), true} + } + + b.WriteString("import \"dappco.re/go/core\"\n\n") + b.WriteString("func init() {\n") + + // Pack groups (entire directories) + packed := make(map[string]bool) + for _, groupPath := range pkg.Groups { + files, err := getAllFiles(groupPath) + if err != nil { + return Result{err, false} + } + for _, file := range files { + if packed[file] { + continue + } + data, err := compressFile(file) + if err != nil { + return Result{err, false} + } + localPath := TrimPrefix(file, groupPath+"/") + relGroup, err := filepath.Rel(pkg.BaseDirectory, groupPath) + if err != nil { + return Result{err, false} + } + b.WriteString(fmt.Sprintf("\tcore.AddAsset(%q, %q, %q)\n", relGroup, localPath, data)) + packed[file] = true + } + } + + // Pack individual assets + for _, asset := range pkg.Assets { + if packed[asset.FullPath] { + continue + } + data, err := compressFile(asset.FullPath) + if err != nil { + return Result{err, false} + } + b.WriteString(fmt.Sprintf("\tcore.AddAsset(%q, %q, %q)\n", asset.Group, asset.Name, data)) + packed[asset.FullPath] = true + } + + b.WriteString("}\n") + return Result{b.String(), true} +} + +// --- Compression --- + +func compressFile(path string) (string, error) { + data, err := os.ReadFile(path) + if err != nil { + return "", err + } + return compress(string(data)) +} + +func compress(input string) (string, error) { + var buf bytes.Buffer + b64 := base64.NewEncoder(base64.StdEncoding, &buf) + gz, err := gzip.NewWriterLevel(b64, gzip.BestCompression) + if err != nil { + return "", err + } + if _, err := gz.Write([]byte(input)); err != nil { + _ = gz.Close() + _ = b64.Close() + return "", err + } + if err := gz.Close(); err != nil { + _ = b64.Close() + return "", err + } + if err := b64.Close(); err != nil { + return "", err + } + return buf.String(), nil +} + +func decompress(input string) (string, error) { + b64 := 
base64.NewDecoder(base64.StdEncoding, NewReader(input)) + gz, err := gzip.NewReader(b64) + if err != nil { + return "", err + } + + data, err := io.ReadAll(gz) + if err != nil { + return "", err + } + if err := gz.Close(); err != nil { + return "", err + } + return string(data), nil +} + +func getAllFiles(dir string) ([]string, error) { + var result []string + err := filepath.WalkDir(dir, func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + if !d.IsDir() { + result = append(result, path) + } + return nil + }) + return result, err +} + +// --- Embed: Scoped Filesystem Mount --- + +// Embed wraps an fs.FS with a basedir for scoped access. +// All paths are relative to basedir. +type Embed struct { + basedir string + fsys fs.FS + embedFS *embed.FS // original embed.FS for type-safe access via EmbedFS() +} + +// Mount creates a scoped view of an fs.FS anchored at basedir. +// +// r := core.Mount(myFS, "lib/prompts") +// if r.OK { emb := r.Value.(*Embed) } +func Mount(fsys fs.FS, basedir string) Result { + s := &Embed{fsys: fsys, basedir: basedir} + + if efs, ok := fsys.(embed.FS); ok { + s.embedFS = &efs + } + + if r := s.ReadDir("."); !r.OK { + return r + } + return Result{s, true} +} + +// MountEmbed creates a scoped view of an embed.FS. +// +// r := core.MountEmbed(myFS, "testdata") +func MountEmbed(efs embed.FS, basedir string) Result { + return Mount(efs, basedir) +} + +func (s *Embed) path(name string) Result { + joined := filepath.ToSlash(filepath.Join(s.basedir, name)) + if HasPrefix(joined, "..") || Contains(joined, "/../") || HasSuffix(joined, "/..") { + return Result{E("embed.path", Concat("path traversal rejected: ", name), nil), false} + } + return Result{joined, true} +} + +// Open opens the named file for reading. 
+// +// r := emb.Open("test.txt") +// if r.OK { file := r.Value.(fs.File) } +func (s *Embed) Open(name string) Result { + r := s.path(name) + if !r.OK { + return r + } + f, err := s.fsys.Open(r.Value.(string)) + if err != nil { + return Result{err, false} + } + return Result{f, true} +} + +// ReadDir reads the named directory. +func (s *Embed) ReadDir(name string) Result { + r := s.path(name) + if !r.OK { + return r + } + return Result{}.Result(fs.ReadDir(s.fsys, r.Value.(string))) +} + +// ReadFile reads the named file. +// +// r := emb.ReadFile("test.txt") +// if r.OK { data := r.Value.([]byte) } +func (s *Embed) ReadFile(name string) Result { + r := s.path(name) + if !r.OK { + return r + } + data, err := fs.ReadFile(s.fsys, r.Value.(string)) + if err != nil { + return Result{err, false} + } + return Result{data, true} +} + +// ReadString reads the named file as a string. +// +// r := emb.ReadString("test.txt") +// if r.OK { content := r.Value.(string) } +func (s *Embed) ReadString(name string) Result { + r := s.ReadFile(name) + if !r.OK { + return r + } + return Result{string(r.Value.([]byte)), true} +} + +// Sub returns a new Embed anchored at a subdirectory within this mount. +// +// r := emb.Sub("testdata") +// if r.OK { sub := r.Value.(*Embed) } +func (s *Embed) Sub(subDir string) Result { + r := s.path(subDir) + if !r.OK { + return r + } + sub, err := fs.Sub(s.fsys, r.Value.(string)) + if err != nil { + return Result{err, false} + } + return Result{&Embed{fsys: sub, basedir: "."}, true} +} + +// FS returns the underlying fs.FS. +func (s *Embed) FS() fs.FS { + return s.fsys +} + +// EmbedFS returns the underlying embed.FS if mounted from one. +// Returns zero embed.FS if mounted from a non-embed source. +func (s *Embed) EmbedFS() embed.FS { + if s.embedFS != nil { + return *s.embedFS + } + return embed.FS{} +} + +// BaseDirectory returns the base directory this Embed is anchored at. 
+func (s *Embed) BaseDirectory() string { + return s.basedir +} + +// --- Template Extraction --- + +// ExtractOptions configures template extraction. +type ExtractOptions struct { + // TemplateFilters identifies template files by substring match. + // Default: [".tmpl"] + TemplateFilters []string + + // IgnoreFiles is a set of filenames to skip during extraction. + IgnoreFiles map[string]struct{} + + // RenameFiles maps original filenames to new names. + RenameFiles map[string]string +} + +// Extract copies a template directory from an fs.FS to targetDir, +// processing Go text/template in filenames and file contents. +// +// Files containing a template filter substring (default: ".tmpl") have +// their contents processed through text/template with the given data. +// The filter is stripped from the output filename. +// +// Directory and file names can contain Go template expressions: +// {{.Name}}/main.go → myproject/main.go +// +// Data can be any struct or map[string]string for template substitution. +func Extract(fsys fs.FS, targetDir string, data any, opts ...ExtractOptions) Result { + opt := ExtractOptions{ + TemplateFilters: []string{".tmpl"}, + IgnoreFiles: make(map[string]struct{}), + RenameFiles: make(map[string]string), + } + if len(opts) > 0 { + if len(opts[0].TemplateFilters) > 0 { + opt.TemplateFilters = opts[0].TemplateFilters + } + if opts[0].IgnoreFiles != nil { + opt.IgnoreFiles = opts[0].IgnoreFiles + } + if opts[0].RenameFiles != nil { + opt.RenameFiles = opts[0].RenameFiles + } + } + + // Ensure target directory exists + targetDir, err := filepath.Abs(targetDir) + if err != nil { + return Result{err, false} + } + if err := os.MkdirAll(targetDir, 0755); err != nil { + return Result{err, false} + } + + // Categorise files + var dirs []string + var templateFiles []string + var standardFiles []string + + err = fs.WalkDir(fsys, ".", func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + if path == "." 
{ + return nil + } + if d.IsDir() { + dirs = append(dirs, path) + return nil + } + filename := filepath.Base(path) + if _, ignored := opt.IgnoreFiles[filename]; ignored { + return nil + } + if isTemplate(filename, opt.TemplateFilters) { + templateFiles = append(templateFiles, path) + } else { + standardFiles = append(standardFiles, path) + } + return nil + }) + if err != nil { + return Result{err, false} + } + + // safePath ensures a rendered path stays under targetDir. + safePath := func(rendered string) (string, error) { + abs, err := filepath.Abs(rendered) + if err != nil { + return "", err + } + if !HasPrefix(abs, targetDir+string(filepath.Separator)) && abs != targetDir { + return "", E("embed.Extract", Concat("path escapes target: ", abs), nil) + } + return abs, nil + } + + // Create directories (names may contain templates) + for _, dir := range dirs { + target, err := safePath(renderPath(filepath.Join(targetDir, dir), data)) + if err != nil { + return Result{err, false} + } + if err := os.MkdirAll(target, 0755); err != nil { + return Result{err, false} + } + } + + // Process template files + for _, path := range templateFiles { + tmpl, err := template.ParseFS(fsys, path) + if err != nil { + return Result{err, false} + } + + targetFile := renderPath(filepath.Join(targetDir, path), data) + + // Strip template filters from filename + dir := filepath.Dir(targetFile) + name := filepath.Base(targetFile) + for _, filter := range opt.TemplateFilters { + name = Replace(name, filter, "") + } + if renamed := opt.RenameFiles[name]; renamed != "" { + name = renamed + } + targetFile, err = safePath(filepath.Join(dir, name)) + if err != nil { + return Result{err, false} + } + + f, err := os.Create(targetFile) + if err != nil { + return Result{err, false} + } + if err := tmpl.Execute(f, data); err != nil { + f.Close() + return Result{err, false} + } + f.Close() + } + + // Copy standard files + for _, path := range standardFiles { + targetPath := path + name := 
filepath.Base(path) + if renamed := opt.RenameFiles[name]; renamed != "" { + targetPath = filepath.Join(filepath.Dir(path), renamed) + } + target, err := safePath(renderPath(filepath.Join(targetDir, targetPath), data)) + if err != nil { + return Result{err, false} + } + if err := copyFile(fsys, path, target); err != nil { + return Result{err, false} + } + } + + return Result{OK: true} +} + +func isTemplate(filename string, filters []string) bool { + for _, f := range filters { + if Contains(filename, f) { + return true + } + } + return false +} + +func renderPath(path string, data any) string { + if data == nil { + return path + } + tmpl, err := template.New("path").Parse(path) + if err != nil { + return path + } + var buf bytes.Buffer + if err := tmpl.Execute(&buf, data); err != nil { + return path + } + return buf.String() +} + +func copyFile(fsys fs.FS, source, target string) error { + s, err := fsys.Open(source) + if err != nil { + return err + } + defer s.Close() + + if err := os.MkdirAll(filepath.Dir(target), 0755); err != nil { + return err + } + + d, err := os.Create(target) + if err != nil { + return err + } + defer d.Close() + + _, err = io.Copy(d, s) + return err +} diff --git a/embed_test.go b/embed_test.go new file mode 100644 index 0000000..99fc7cd --- /dev/null +++ b/embed_test.go @@ -0,0 +1,256 @@ +package core_test + +import ( + "bytes" + "compress/gzip" + "encoding/base64" + "os" + "testing" + + . 
"dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- Mount --- + +func TestMount_Good(t *testing.T) { + r := Mount(testFS, "testdata") + assert.True(t, r.OK) +} + +func TestMount_Bad(t *testing.T) { + r := Mount(testFS, "nonexistent") + assert.False(t, r.OK) +} + +// --- Embed methods --- + +func TestEmbed_ReadFile_Good(t *testing.T) { + emb := Mount(testFS, "testdata").Value.(*Embed) + r := emb.ReadFile("test.txt") + assert.True(t, r.OK) + assert.Equal(t, "hello from testdata\n", string(r.Value.([]byte))) +} + +func TestEmbed_ReadString_Good(t *testing.T) { + emb := Mount(testFS, "testdata").Value.(*Embed) + r := emb.ReadString("test.txt") + assert.True(t, r.OK) + assert.Equal(t, "hello from testdata\n", r.Value.(string)) +} + +func TestEmbed_Open_Good(t *testing.T) { + emb := Mount(testFS, "testdata").Value.(*Embed) + r := emb.Open("test.txt") + assert.True(t, r.OK) +} + +func TestEmbed_ReadDir_Good(t *testing.T) { + emb := Mount(testFS, "testdata").Value.(*Embed) + r := emb.ReadDir(".") + assert.True(t, r.OK) + assert.NotEmpty(t, r.Value) +} + +func TestEmbed_Sub_Good(t *testing.T) { + emb := Mount(testFS, ".").Value.(*Embed) + r := emb.Sub("testdata") + assert.True(t, r.OK) + sub := r.Value.(*Embed) + r2 := sub.ReadFile("test.txt") + assert.True(t, r2.OK) +} + +func TestEmbed_BaseDir_Good(t *testing.T) { + emb := Mount(testFS, "testdata").Value.(*Embed) + assert.Equal(t, "testdata", emb.BaseDirectory()) +} + +func TestEmbed_FS_Good(t *testing.T) { + emb := Mount(testFS, "testdata").Value.(*Embed) + assert.NotNil(t, emb.FS()) +} + +func TestEmbed_EmbedFS_Good(t *testing.T) { + emb := Mount(testFS, "testdata").Value.(*Embed) + efs := emb.EmbedFS() + _, err := efs.ReadFile("testdata/test.txt") + assert.NoError(t, err) +} + +// --- Extract --- + +func TestExtract_Good(t *testing.T) { + dir := t.TempDir() + r := Extract(testFS, dir, nil) + assert.True(t, r.OK) + + content, err := os.ReadFile(dir + "/testdata/test.txt") + assert.NoError(t, err) + 
assert.Equal(t, "hello from testdata\n", string(content)) +} + +// --- Asset Pack --- + +func TestAddGetAsset_Good(t *testing.T) { + AddAsset("test-group", "greeting", mustCompress("hello world")) + r := GetAsset("test-group", "greeting") + assert.True(t, r.OK) + assert.Equal(t, "hello world", r.Value.(string)) +} + +func TestGetAsset_Bad(t *testing.T) { + r := GetAsset("missing-group", "missing") + assert.False(t, r.OK) +} + +func TestGetAssetBytes_Good(t *testing.T) { + AddAsset("bytes-group", "file", mustCompress("binary content")) + r := GetAssetBytes("bytes-group", "file") + assert.True(t, r.OK) + assert.Equal(t, []byte("binary content"), r.Value.([]byte)) +} + +func TestMountEmbed_Good(t *testing.T) { + r := MountEmbed(testFS, "testdata") + assert.True(t, r.OK) +} + +// --- ScanAssets --- + +func TestScanAssets_Good(t *testing.T) { + r := ScanAssets([]string{"testdata/scantest/sample.go"}) + assert.True(t, r.OK) + pkgs := r.Value.([]ScannedPackage) + assert.Len(t, pkgs, 1) + assert.Equal(t, "scantest", pkgs[0].PackageName) +} + +func TestScanAssets_Bad(t *testing.T) { + r := ScanAssets([]string{"nonexistent.go"}) + assert.False(t, r.OK) +} + +func TestGeneratePack_Empty_Good(t *testing.T) { + pkg := ScannedPackage{PackageName: "empty"} + r := GeneratePack(pkg) + assert.True(t, r.OK) + assert.Contains(t, r.Value.(string), "package empty") +} + +func TestGeneratePack_WithFiles_Good(t *testing.T) { + dir := t.TempDir() + assetDir := dir + "/mygroup" + os.MkdirAll(assetDir, 0755) + os.WriteFile(assetDir+"/hello.txt", []byte("hello world"), 0644) + + source := "package test\nimport \"dappco.re/go/core\"\nfunc example() {\n\t_, _ = core.GetAsset(\"mygroup\", \"hello.txt\")\n}\n" + goFile := dir + "/test.go" + os.WriteFile(goFile, []byte(source), 0644) + + sr := ScanAssets([]string{goFile}) + assert.True(t, sr.OK) + pkgs := sr.Value.([]ScannedPackage) + + r := GeneratePack(pkgs[0]) + assert.True(t, r.OK) + assert.Contains(t, r.Value.(string), "core.AddAsset") +} + 
+// --- Extract (template + nested) --- + +func TestExtract_WithTemplate_Good(t *testing.T) { + dir := t.TempDir() + + // Create an in-memory FS with a template file and a plain file + tmplDir := os.DirFS(t.TempDir()) + + // Use a real temp dir with files + srcDir := t.TempDir() + os.WriteFile(srcDir+"/plain.txt", []byte("static content"), 0644) + os.WriteFile(srcDir+"/greeting.tmpl", []byte("Hello {{.Name}}!"), 0644) + os.MkdirAll(srcDir+"/sub", 0755) + os.WriteFile(srcDir+"/sub/nested.txt", []byte("nested"), 0644) + + _ = tmplDir + fsys := os.DirFS(srcDir) + data := map[string]string{"Name": "World"} + + r := Extract(fsys, dir, data) + assert.True(t, r.OK) + + // Plain file copied + content, err := os.ReadFile(dir + "/plain.txt") + assert.NoError(t, err) + assert.Equal(t, "static content", string(content)) + + // Template processed and .tmpl stripped + greeting, err := os.ReadFile(dir + "/greeting") + assert.NoError(t, err) + assert.Equal(t, "Hello World!", string(greeting)) + + // Nested directory preserved + nested, err := os.ReadFile(dir + "/sub/nested.txt") + assert.NoError(t, err) + assert.Equal(t, "nested", string(nested)) +} + +func TestExtract_BadTargetDir_Ugly(t *testing.T) { + srcDir := t.TempDir() + os.WriteFile(srcDir+"/f.txt", []byte("x"), 0644) + r := Extract(os.DirFS(srcDir), "/nonexistent/deeply/nested/impossible", nil) + // Should fail gracefully, not panic + _ = r +} + +func TestEmbed_PathTraversal_Ugly(t *testing.T) { + emb := Mount(testFS, "testdata").Value.(*Embed) + r := emb.ReadFile("../../etc/passwd") + assert.False(t, r.OK) +} + +func TestEmbed_Sub_BaseDir_Good(t *testing.T) { + emb := Mount(testFS, "testdata").Value.(*Embed) + r := emb.Sub("scantest") + assert.True(t, r.OK) + sub := r.Value.(*Embed) + assert.Equal(t, ".", sub.BaseDirectory()) +} + +func TestEmbed_Open_Bad(t *testing.T) { + emb := Mount(testFS, "testdata").Value.(*Embed) + r := emb.Open("nonexistent.txt") + assert.False(t, r.OK) +} + +func TestEmbed_ReadDir_Bad(t 
*testing.T) { + emb := Mount(testFS, "testdata").Value.(*Embed) + r := emb.ReadDir("nonexistent") + assert.False(t, r.OK) +} + +func TestEmbed_EmbedFS_Original_Good(t *testing.T) { + emb := Mount(testFS, "testdata").Value.(*Embed) + efs := emb.EmbedFS() + _, err := efs.ReadFile("testdata/test.txt") + assert.NoError(t, err) +} + +func TestExtract_NilData_Good(t *testing.T) { + dir := t.TempDir() + srcDir := t.TempDir() + os.WriteFile(srcDir+"/file.txt", []byte("no template"), 0644) + + r := Extract(os.DirFS(srcDir), dir, nil) + assert.True(t, r.OK) +} + +func mustCompress(input string) string { + var buf bytes.Buffer + b64 := base64.NewEncoder(base64.StdEncoding, &buf) + gz, _ := gzip.NewWriterLevel(b64, gzip.BestCompression) + gz.Write([]byte(input)) + gz.Close() + b64.Close() + return buf.String() +} diff --git a/error.go b/error.go new file mode 100644 index 0000000..d562494 --- /dev/null +++ b/error.go @@ -0,0 +1,395 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// Structured errors, crash recovery, and reporting for the Core framework. +// Provides E() for error creation, Wrap()/WrapCode() for chaining, +// and Err for panic recovery and crash reporting. + +package core + +import ( + "encoding/json" + "errors" + "iter" + "maps" + "os" + "path/filepath" + "runtime" + "runtime/debug" + "sync" + "time" +) + +// ErrorSink is the shared interface for error reporting. +// Implemented by ErrorLog (structured logging) and ErrorPanic (panic recovery). +type ErrorSink interface { + Error(msg string, keyvals ...any) + Warn(msg string, keyvals ...any) +} + +var _ ErrorSink = (*Log)(nil) + +// Err represents a structured error with operational context. +// It implements the error interface and supports unwrapping. 
+type Err struct { + Operation string // Operation being performed (e.g., "user.Save") + Message string // Human-readable message + Cause error // Underlying error (optional) + Code string // Error code (optional, e.g., "VALIDATION_FAILED") +} + +// Error implements the error interface. +func (e *Err) Error() string { + var prefix string + if e.Operation != "" { + prefix = e.Operation + ": " + } + if e.Cause != nil { + if e.Code != "" { + return Concat(prefix, e.Message, " [", e.Code, "]: ", e.Cause.Error()) + } + return Concat(prefix, e.Message, ": ", e.Cause.Error()) + } + if e.Code != "" { + return Concat(prefix, e.Message, " [", e.Code, "]") + } + return Concat(prefix, e.Message) +} + +// Unwrap returns the underlying error for use with errors.Is and errors.As. +func (e *Err) Unwrap() error { + return e.Cause +} + +// --- Error Creation Functions --- + +// E creates a new Err with operation context. +// The underlying error can be nil for creating errors without a cause. +// +// Example: +// +// return log.E("user.Save", "failed to save user", err) +// return log.E("api.Call", "rate limited", nil) // No underlying cause +func E(op, msg string, err error) error { + return &Err{Operation: op, Message: msg, Cause: err} +} + +// Wrap wraps an error with operation context. +// Returns nil if err is nil, to support conditional wrapping. +// Preserves error Code if the wrapped error is an *Err. +// +// Example: +// +// return log.Wrap(err, "db.Query", "database query failed") +func Wrap(err error, op, msg string) error { + if err == nil { + return nil + } + // Preserve Code from wrapped *Err + var logErr *Err + if As(err, &logErr) && logErr.Code != "" { + return &Err{Operation: op, Message: msg, Cause: err, Code: logErr.Code} + } + return &Err{Operation: op, Message: msg, Cause: err} +} + +// WrapCode wraps an error with operation context and error code. +// Returns nil only if both err is nil AND code is empty. 
+// Useful for API errors that need machine-readable codes. +// +// Example: +// +// return log.WrapCode(err, "VALIDATION_ERROR", "user.Validate", "invalid email") +func WrapCode(err error, code, op, msg string) error { + if err == nil && code == "" { + return nil + } + return &Err{Operation: op, Message: msg, Cause: err, Code: code} +} + +// NewCode creates an error with just code and message (no underlying error). +// Useful for creating sentinel errors with codes. +// +// Example: +// +// var ErrNotFound = log.NewCode("NOT_FOUND", "resource not found") +func NewCode(code, msg string) error { + return &Err{Message: msg, Code: code} +} + +// --- Standard Library Wrappers --- + +// Is reports whether any error in err's tree matches target. +// Wrapper around errors.Is for convenience. +func Is(err, target error) bool { + return errors.Is(err, target) +} + +// As finds the first error in err's tree that matches target. +// Wrapper around errors.As for convenience. +func As(err error, target any) bool { + return errors.As(err, target) +} + +// NewError creates a simple error with the given text. +// Wrapper around errors.New for convenience. +func NewError(text string) error { + return errors.New(text) +} + +// ErrorJoin combines multiple errors into one. +// +// core.ErrorJoin(err1, err2, err3) +func ErrorJoin(errs ...error) error { + return errors.Join(errs...) +} + +// --- Error Introspection Helpers --- + +// Operation extracts the operation name from an error. +// Returns empty string if the error is not an *Err. +func Operation(err error) string { + var e *Err + if As(err, &e) { + return e.Operation + } + return "" +} + +// ErrorCode extracts the error code from an error. +// Returns empty string if the error is not an *Err or has no code. +func ErrorCode(err error) string { + var e *Err + if As(err, &e) { + return e.Code + } + return "" +} + +// Message extracts the message from an error. +// Returns the error's Error() string if not an *Err. 
+func ErrorMessage(err error) string { + if err == nil { + return "" + } + var e *Err + if As(err, &e) { + return e.Message + } + return err.Error() +} + +// Root returns the root cause of an error chain. +// Unwraps until no more wrapped errors are found. +func Root(err error) error { + if err == nil { + return nil + } + for { + unwrapped := errors.Unwrap(err) + if unwrapped == nil { + return err + } + err = unwrapped + } +} + +// AllOperations returns an iterator over all operational contexts in the error chain. +// It traverses the error tree using errors.Unwrap. +func AllOperations(err error) iter.Seq[string] { + return func(yield func(string) bool) { + for err != nil { + if e, ok := err.(*Err); ok { + if e.Operation != "" { + if !yield(e.Operation) { + return + } + } + } + err = errors.Unwrap(err) + } + } +} + +// StackTrace returns the logical stack trace (chain of operations) from an error. +// It returns an empty slice if no operational context is found. +func StackTrace(err error) []string { + var stack []string + for op := range AllOperations(err) { + stack = append(stack, op) + } + return stack +} + +// FormatStackTrace returns a pretty-printed logical stack trace. +func FormatStackTrace(err error) string { + var ops []string + for op := range AllOperations(err) { + ops = append(ops, op) + } + if len(ops) == 0 { + return "" + } + return Join(" -> ", ops...) +} + +// --- ErrorLog: Log-and-Return Error Helpers --- + +// ErrorLog combines error creation with logging. +// Primary action: return an error. Secondary: log it. +type ErrorLog struct { + log *Log +} + +func (el *ErrorLog) logger() *Log { + if el.log != nil { + return el.log + } + return Default() +} + +// Error logs at Error level and returns a Result with the wrapped error. 
+func (el *ErrorLog) Error(err error, op, msg string) Result { + if err == nil { + return Result{OK: true} + } + wrapped := Wrap(err, op, msg) + el.logger().Error(msg, "op", op, "err", err) + return Result{wrapped, false} +} + +// Warn logs at Warn level and returns a Result with the wrapped error. +func (el *ErrorLog) Warn(err error, op, msg string) Result { + if err == nil { + return Result{OK: true} + } + wrapped := Wrap(err, op, msg) + el.logger().Warn(msg, "op", op, "err", err) + return Result{wrapped, false} +} + +// Must logs and panics if err is not nil. +func (el *ErrorLog) Must(err error, op, msg string) { + if err != nil { + el.logger().Error(msg, "op", op, "err", err) + panic(Wrap(err, op, msg)) + } +} + +// --- Crash Recovery & Reporting --- + +// CrashReport represents a single crash event. +type CrashReport struct { + Timestamp time.Time `json:"timestamp"` + Error string `json:"error"` + Stack string `json:"stack"` + System CrashSystem `json:"system,omitempty"` + Meta map[string]string `json:"meta,omitempty"` +} + +// CrashSystem holds system information at crash time. +type CrashSystem struct { + OperatingSystem string `json:"operatingsystem"` + Architecture string `json:"architecture"` + Version string `json:"go_version"` +} + +// ErrorPanic manages panic recovery and crash reporting. +type ErrorPanic struct { + filePath string + meta map[string]string + onCrash func(CrashReport) +} + +// Recover captures a panic and creates a crash report. 
+// Use as: defer c.Error().Recover() +func (h *ErrorPanic) Recover() { + if h == nil { + return + } + r := recover() + if r == nil { + return + } + + err, ok := r.(error) + if !ok { + err = NewError(Sprint("panic: ", r)) + } + + report := CrashReport{ + Timestamp: time.Now(), + Error: err.Error(), + Stack: string(debug.Stack()), + System: CrashSystem{ + OperatingSystem: runtime.GOOS, + Architecture: runtime.GOARCH, + Version: runtime.Version(), + }, + Meta: maps.Clone(h.meta), + } + + if h.onCrash != nil { + h.onCrash(report) + } + + if h.filePath != "" { + h.appendReport(report) + } +} + +// SafeGo runs a function in a goroutine with panic recovery. +func (h *ErrorPanic) SafeGo(fn func()) { + go func() { + defer h.Recover() + fn() + }() +} + +// Reports returns the last n crash reports from the file. +func (h *ErrorPanic) Reports(n int) Result { + if h.filePath == "" { + return Result{} + } + crashMu.Lock() + defer crashMu.Unlock() + data, err := os.ReadFile(h.filePath) + if err != nil { + return Result{err, false} + } + var reports []CrashReport + if err := json.Unmarshal(data, &reports); err != nil { + return Result{err, false} + } + if n <= 0 || len(reports) <= n { + return Result{reports, true} + } + return Result{reports[len(reports)-n:], true} +} + +var crashMu sync.Mutex + +func (h *ErrorPanic) appendReport(report CrashReport) { + crashMu.Lock() + defer crashMu.Unlock() + + var reports []CrashReport + if data, err := os.ReadFile(h.filePath); err == nil { + if err := json.Unmarshal(data, &reports); err != nil { + reports = nil + } + } + + reports = append(reports, report) + data, err := json.MarshalIndent(reports, "", " ") + if err != nil { + Default().Error(Concat("crash report marshal failed: ", err.Error())) + return + } + if err := os.MkdirAll(filepath.Dir(h.filePath), 0755); err != nil { + Default().Error(Concat("crash report dir failed: ", err.Error())) + return + } + if err := os.WriteFile(h.filePath, data, 0600); err != nil { + 
Default().Error(Concat("crash report write failed: ", err.Error())) + } +} diff --git a/error_test.go b/error_test.go new file mode 100644 index 0000000..1f36264 --- /dev/null +++ b/error_test.go @@ -0,0 +1,272 @@ +package core_test + +import ( + "errors" + "testing" + + . "dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- Error Creation --- + +func TestE_Good(t *testing.T) { + err := E("user.Save", "failed to save", nil) + assert.Error(t, err) + assert.Contains(t, err.Error(), "user.Save") + assert.Contains(t, err.Error(), "failed to save") +} + +func TestE_WithCause_Good(t *testing.T) { + cause := errors.New("connection refused") + err := E("db.Connect", "database unavailable", cause) + assert.ErrorIs(t, err, cause) +} + +func TestWrap_Good(t *testing.T) { + cause := errors.New("timeout") + err := Wrap(cause, "api.Call", "request failed") + assert.Error(t, err) + assert.ErrorIs(t, err, cause) +} + +func TestWrap_Nil_Good(t *testing.T) { + err := Wrap(nil, "api.Call", "request failed") + assert.Nil(t, err) +} + +func TestWrapCode_Good(t *testing.T) { + cause := errors.New("invalid email") + err := WrapCode(cause, "VALIDATION_ERROR", "user.Validate", "bad input") + assert.Error(t, err) + assert.Equal(t, "VALIDATION_ERROR", ErrorCode(err)) +} + +func TestNewCode_Good(t *testing.T) { + err := NewCode("NOT_FOUND", "resource not found") + assert.Error(t, err) + assert.Equal(t, "NOT_FOUND", ErrorCode(err)) +} + +// --- Error Introspection --- + +func TestOperation_Good(t *testing.T) { + err := E("brain.Recall", "search failed", nil) + assert.Equal(t, "brain.Recall", Operation(err)) +} + +func TestOperation_Bad(t *testing.T) { + err := errors.New("plain error") + assert.Equal(t, "", Operation(err)) +} + +func TestErrorMessage_Good(t *testing.T) { + err := E("op", "the message", nil) + assert.Equal(t, "the message", ErrorMessage(err)) +} + +func TestErrorMessage_Plain(t *testing.T) { + err := errors.New("plain") + assert.Equal(t, "plain", 
ErrorMessage(err)) +} + +func TestErrorMessage_Nil(t *testing.T) { + assert.Equal(t, "", ErrorMessage(nil)) +} + +func TestRoot_Good(t *testing.T) { + root := errors.New("root cause") + wrapped := Wrap(root, "layer1", "first wrap") + double := Wrap(wrapped, "layer2", "second wrap") + assert.Equal(t, root, Root(double)) +} + +func TestRoot_Nil(t *testing.T) { + assert.Nil(t, Root(nil)) +} + +func TestStackTrace_Good(t *testing.T) { + err := Wrap(E("inner", "cause", nil), "outer", "wrapper") + stack := StackTrace(err) + assert.Len(t, stack, 2) + assert.Equal(t, "outer", stack[0]) + assert.Equal(t, "inner", stack[1]) +} + +func TestFormatStackTrace_Good(t *testing.T) { + err := Wrap(E("a", "x", nil), "b", "y") + formatted := FormatStackTrace(err) + assert.Equal(t, "b -> a", formatted) +} + +// --- ErrorLog --- + +func TestErrorLog_Good(t *testing.T) { + c := New().Value.(*Core) + cause := errors.New("boom") + r := c.Log().Error(cause, "test.Operation", "something broke") + assert.False(t, r.OK) + assert.ErrorIs(t, r.Value.(error), cause) +} + +func TestErrorLog_Nil_Good(t *testing.T) { + c := New().Value.(*Core) + r := c.Log().Error(nil, "test.Operation", "no error") + assert.True(t, r.OK) +} + +func TestErrorLog_Warn_Good(t *testing.T) { + c := New().Value.(*Core) + cause := errors.New("warning") + r := c.Log().Warn(cause, "test.Operation", "heads up") + assert.False(t, r.OK) +} + +func TestErrorLog_Must_Ugly(t *testing.T) { + c := New().Value.(*Core) + assert.Panics(t, func() { + c.Log().Must(errors.New("fatal"), "test.Operation", "must fail") + }) +} + +func TestErrorLog_Must_Nil_Good(t *testing.T) { + c := New().Value.(*Core) + assert.NotPanics(t, func() { + c.Log().Must(nil, "test.Operation", "no error") + }) +} + +// --- ErrorPanic --- + +func TestErrorPanic_Recover_Good(t *testing.T) { + c := New().Value.(*Core) + // Should not panic — Recover catches it + assert.NotPanics(t, func() { + defer c.Error().Recover() + panic("test panic") + }) +} + +func 
TestErrorPanic_SafeGo_Good(t *testing.T) { + c := New().Value.(*Core) + done := make(chan bool, 1) + c.Error().SafeGo(func() { + done <- true + }) + assert.True(t, <-done) +} + +func TestErrorPanic_SafeGo_Panic_Good(t *testing.T) { + c := New().Value.(*Core) + done := make(chan bool, 1) + c.Error().SafeGo(func() { + defer func() { done <- true }() + panic("caught by SafeGo") + }) + // SafeGo recovers — goroutine completes without crashing the process + <-done +} + +// --- Standard Library Wrappers --- + +func TestIs_Good(t *testing.T) { + target := errors.New("target") + wrapped := Wrap(target, "op", "msg") + assert.True(t, Is(wrapped, target)) +} + +func TestAs_Good(t *testing.T) { + err := E("op", "msg", nil) + var e *Err + assert.True(t, As(err, &e)) + assert.Equal(t, "op", e.Operation) +} + +func TestNewError_Good(t *testing.T) { + err := NewError("simple error") + assert.Equal(t, "simple error", err.Error()) +} + +func TestErrorJoin_Good(t *testing.T) { + e1 := errors.New("first") + e2 := errors.New("second") + joined := ErrorJoin(e1, e2) + assert.ErrorIs(t, joined, e1) + assert.ErrorIs(t, joined, e2) +} + +// --- ErrorPanic Crash Reports --- + +func TestErrorPanic_Reports_Good(t *testing.T) { + dir := t.TempDir() + path := dir + "/crashes.json" + + // Create ErrorPanic with file output + c := New().Value.(*Core) + // Access internals via a crash that writes to file + // Since ErrorPanic fields are unexported, we test via Recover + _ = c + _ = path + // Crash reporting needs ErrorPanic configured with filePath — tested indirectly +} + +// --- ErrorPanic Crash File --- + +func TestErrorPanic_CrashFile_Good(t *testing.T) { + dir := t.TempDir() + path := dir + "/crashes.json" + + // Create Core, trigger a panic through SafeGo, check crash file + // ErrorPanic.filePath is unexported — but we can test via the package-level + // error handling that writes crash reports + + // For now, test that Reports handles missing file gracefully + c := New().Value.(*Core) + r 
:= c.Error().Reports(5) + assert.False(t, r.OK) + assert.Nil(t, r.Value) + _ = path +} + +// --- Error formatting branches --- + +func TestErr_Error_WithCode_Good(t *testing.T) { + err := WrapCode(errors.New("bad"), "INVALID", "validate", "input failed") + assert.Contains(t, err.Error(), "[INVALID]") + assert.Contains(t, err.Error(), "validate") + assert.Contains(t, err.Error(), "bad") +} + +func TestErr_Error_CodeNoCause_Good(t *testing.T) { + err := NewCode("NOT_FOUND", "resource missing") + assert.Contains(t, err.Error(), "[NOT_FOUND]") + assert.Contains(t, err.Error(), "resource missing") +} + +func TestErr_Error_NoOp_Good(t *testing.T) { + err := &Err{Message: "bare error"} + assert.Equal(t, "bare error", err.Error()) +} + +func TestWrapCode_NilErr_EmptyCode_Good(t *testing.T) { + err := WrapCode(nil, "", "op", "msg") + assert.Nil(t, err) +} + +func TestWrap_PreservesCode_Good(t *testing.T) { + inner := WrapCode(errors.New("root"), "AUTH_FAIL", "auth", "denied") + outer := Wrap(inner, "handler", "request failed") + assert.Equal(t, "AUTH_FAIL", ErrorCode(outer)) +} + +func TestErrorLog_Warn_Nil_Good(t *testing.T) { + c := New().Value.(*Core) + r := c.LogWarn(nil, "op", "msg") + assert.True(t, r.OK) +} + +func TestErrorLog_Error_Nil_Good(t *testing.T) { + c := New().Value.(*Core) + r := c.LogError(nil, "op", "msg") + assert.True(t, r.OK) +} diff --git a/fs.go b/fs.go new file mode 100644 index 0000000..249ddaf --- /dev/null +++ b/fs.go @@ -0,0 +1,296 @@ +// Sandboxed local filesystem I/O for the Core framework. +package core + +import ( + "os" + "os/user" + "path/filepath" + "time" +) + +// Fs is a sandboxed local filesystem backend. +type Fs struct { + root string +} + +// path sanitises and returns the full path. +// Absolute paths are sandboxed under root (unless root is "/"). +// Empty root defaults to "/" — the zero value of Fs is usable. 
// path sanitises p and returns the full on-disk path.
//
// Rules, in order:
//   - empty root behaves as "/" (the zero value of Fs is usable);
//   - empty p maps to the root itself;
//   - with root "/", a relative p is resolved against the current working
//     directory (so the unsandboxed Fs behaves like the 'os' package);
//   - otherwise p is Cleaned behind a leading "/" (collapsing "." and "..")
//     and joined under root.
//
// Note: this is purely lexical; symlinks are handled by validatePath.
func (m *Fs) path(p string) string {
	root := m.root
	if root == "" {
		root = "/"
	}
	if p == "" {
		return root
	}

	// If the path is relative and the medium is rooted at "/",
	// treat it as relative to the current working directory.
	// This makes io.Local behave more like the standard 'os' package.
	if root == "/" && !filepath.IsAbs(p) {
		cwd, _ := os.Getwd() // error ignored: a failed Getwd yields "" and p joins as-is
		return filepath.Join(cwd, p)
	}

	// Use filepath.Clean with a leading slash to resolve all .. and . internally
	// before joining with the root. This is a standard way to sandbox paths.
	clean := filepath.Clean("/" + p)

	// If root is "/", allow absolute paths through
	if root == "/" {
		return clean
	}

	// Strip leading "/" so Join works correctly with root
	return filepath.Join(root, clean[1:])
}

// validatePath ensures the path is within the sandbox, following symlinks if they exist.
//
// It walks p component by component, resolving each existing component with
// EvalSymlinks and checking (via filepath.Rel) that the resolved target is
// still under root. Components that do not exist yet are appended lexically,
// which is safe because the path was Cleaned first. A detected escape is
// logged to stderr as a security event and returned as a failed Result.
//
// NOTE(review): resolution happens before use, so a symlink swapped in
// between validation and the actual file operation is a TOCTOU window —
// presumably accepted for this sandbox's threat model; confirm.
// Split/HasPrefix/Print/E/Concat appear to be project helpers mirroring
// the strings/fmt stdlib — TODO confirm their exact semantics.
func (m *Fs) validatePath(p string) Result {
	root := m.root
	if root == "" {
		root = "/"
	}
	// Unsandboxed: no containment to enforce, return the lexical path.
	if root == "/" {
		return Result{m.path(p), true}
	}

	// Split the cleaned path into components
	parts := Split(filepath.Clean("/"+p), string(os.PathSeparator))
	current := root

	for _, part := range parts {
		if part == "" {
			continue
		}

		next := filepath.Join(current, part)
		realNext, err := filepath.EvalSymlinks(next)
		if err != nil {
			if os.IsNotExist(err) {
				// Part doesn't exist, we can't follow symlinks anymore.
				// Since the path is already Cleaned and current is safe,
				// appending a component to current will not escape.
				current = next
				continue
			}
			return Result{err, false}
		}

		// Verify the resolved part is still within the root
		rel, err := filepath.Rel(root, realNext)
		if err != nil || HasPrefix(rel, "..") {
			// Security event: sandbox escape attempt
			username := "unknown"
			// Inner err deliberately shadows: the Rel err above is reused below.
			if u, err := user.Current(); err == nil {
				username = u.Username
			}
			Print(os.Stderr, "[%s] SECURITY sandbox escape detected root=%s path=%s attempted=%s user=%s",
				time.Now().Format(time.RFC3339), root, p, realNext, username)
			if err == nil {
				err = E("fs.validatePath", Concat("sandbox escape: ", p, " resolves outside ", m.root), nil)
			}
			return Result{err, false}
		}
		current = realNext
	}

	return Result{current, true}
}

// Read returns file contents as string.
// Fails if the path escapes the sandbox or the file cannot be read.
func (m *Fs) Read(p string) Result {
	vp := m.validatePath(p)
	if !vp.OK {
		return vp
	}
	data, err := os.ReadFile(vp.Value.(string))
	if err != nil {
		return Result{err, false}
	}
	return Result{string(data), true}
}

// Write saves content to file, creating parent directories as needed.
// Files are created with mode 0644. For sensitive files (keys, secrets),
// use WriteMode with 0600.
func (m *Fs) Write(p, content string) Result {
	return m.WriteMode(p, content, 0644)
}

// WriteMode saves content to file with explicit permissions.
// Use 0600 for sensitive files (encryption output, private keys, auth hashes).
// Parent directories are created with 0755.
func (m *Fs) WriteMode(p, content string, mode os.FileMode) Result {
	vp := m.validatePath(p)
	if !vp.OK {
		return vp
	}
	full := vp.Value.(string)
	if err := os.MkdirAll(filepath.Dir(full), 0755); err != nil {
		return Result{err, false}
	}
	if err := os.WriteFile(full, []byte(content), mode); err != nil {
		return Result{err, false}
	}
	return Result{OK: true}
}

// EnsureDir creates directory if it doesn't exist.
+func (m *Fs) EnsureDir(p string) Result { + vp := m.validatePath(p) + if !vp.OK { + return vp + } + if err := os.MkdirAll(vp.Value.(string), 0755); err != nil { + return Result{err, false} + } + return Result{OK: true} +} + +// IsDir returns true if path is a directory. +func (m *Fs) IsDir(p string) bool { + if p == "" { + return false + } + vp := m.validatePath(p) + if !vp.OK { + return false + } + info, err := os.Stat(vp.Value.(string)) + return err == nil && info.IsDir() +} + +// IsFile returns true if path is a regular file. +func (m *Fs) IsFile(p string) bool { + if p == "" { + return false + } + vp := m.validatePath(p) + if !vp.OK { + return false + } + info, err := os.Stat(vp.Value.(string)) + return err == nil && info.Mode().IsRegular() +} + +// Exists returns true if path exists. +func (m *Fs) Exists(p string) bool { + vp := m.validatePath(p) + if !vp.OK { + return false + } + _, err := os.Stat(vp.Value.(string)) + return err == nil +} + +// List returns directory entries. +func (m *Fs) List(p string) Result { + vp := m.validatePath(p) + if !vp.OK { + return vp + } + return Result{}.Result(os.ReadDir(vp.Value.(string))) +} + +// Stat returns file info. +func (m *Fs) Stat(p string) Result { + vp := m.validatePath(p) + if !vp.OK { + return vp + } + return Result{}.Result(os.Stat(vp.Value.(string))) +} + +// Open opens the named file for reading. +func (m *Fs) Open(p string) Result { + vp := m.validatePath(p) + if !vp.OK { + return vp + } + return Result{}.Result(os.Open(vp.Value.(string))) +} + +// Create creates or truncates the named file. +func (m *Fs) Create(p string) Result { + vp := m.validatePath(p) + if !vp.OK { + return vp + } + full := vp.Value.(string) + if err := os.MkdirAll(filepath.Dir(full), 0755); err != nil { + return Result{err, false} + } + return Result{}.Result(os.Create(full)) +} + +// Append opens the named file for appending, creating it if it doesn't exist. 
+func (m *Fs) Append(p string) Result { + vp := m.validatePath(p) + if !vp.OK { + return vp + } + full := vp.Value.(string) + if err := os.MkdirAll(filepath.Dir(full), 0755); err != nil { + return Result{err, false} + } + return Result{}.Result(os.OpenFile(full, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)) +} + +// ReadStream returns a reader for the file content. +func (m *Fs) ReadStream(path string) Result { + return m.Open(path) +} + +// WriteStream returns a writer for the file content. +func (m *Fs) WriteStream(path string) Result { + return m.Create(path) +} + +// Delete removes a file or empty directory. +func (m *Fs) Delete(p string) Result { + vp := m.validatePath(p) + if !vp.OK { + return vp + } + full := vp.Value.(string) + if full == "/" || full == os.Getenv("HOME") { + return Result{E("fs.Delete", Concat("refusing to delete protected path: ", full), nil), false} + } + if err := os.Remove(full); err != nil { + return Result{err, false} + } + return Result{OK: true} +} + +// DeleteAll removes a file or directory recursively. +func (m *Fs) DeleteAll(p string) Result { + vp := m.validatePath(p) + if !vp.OK { + return vp + } + full := vp.Value.(string) + if full == "/" || full == os.Getenv("HOME") { + return Result{E("fs.DeleteAll", Concat("refusing to delete protected path: ", full), nil), false} + } + if err := os.RemoveAll(full); err != nil { + return Result{err, false} + } + return Result{OK: true} +} + +// Rename moves a file or directory. 
+func (m *Fs) Rename(oldPath, newPath string) Result { + oldVp := m.validatePath(oldPath) + if !oldVp.OK { + return oldVp + } + newVp := m.validatePath(newPath) + if !newVp.OK { + return newVp + } + if err := os.Rename(oldVp.Value.(string), newVp.Value.(string)); err != nil { + return Result{err, false} + } + return Result{OK: true} +} diff --git a/fs_test.go b/fs_test.go new file mode 100644 index 0000000..7982802 --- /dev/null +++ b/fs_test.go @@ -0,0 +1,257 @@ +package core_test + +import ( + "io" + "io/fs" + "os" + "path/filepath" + "testing" + + . "dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- Fs (Sandboxed Filesystem) --- + +func TestFs_WriteRead_Good(t *testing.T) { + dir := t.TempDir() + c := New().Value.(*Core) + + path := filepath.Join(dir, "test.txt") + assert.True(t, c.Fs().Write(path, "hello core").OK) + + r := c.Fs().Read(path) + assert.True(t, r.OK) + assert.Equal(t, "hello core", r.Value.(string)) +} + +func TestFs_Read_Bad(t *testing.T) { + c := New().Value.(*Core) + r := c.Fs().Read("/nonexistent/path/to/file.txt") + assert.False(t, r.OK) +} + +func TestFs_EnsureDir_Good(t *testing.T) { + dir := t.TempDir() + c := New().Value.(*Core) + path := filepath.Join(dir, "sub", "dir") + assert.True(t, c.Fs().EnsureDir(path).OK) + assert.True(t, c.Fs().IsDir(path)) +} + +func TestFs_IsDir_Good(t *testing.T) { + c := New().Value.(*Core) + dir := t.TempDir() + assert.True(t, c.Fs().IsDir(dir)) + assert.False(t, c.Fs().IsDir(filepath.Join(dir, "nonexistent"))) + assert.False(t, c.Fs().IsDir("")) +} + +func TestFs_IsFile_Good(t *testing.T) { + dir := t.TempDir() + c := New().Value.(*Core) + path := filepath.Join(dir, "test.txt") + c.Fs().Write(path, "data") + assert.True(t, c.Fs().IsFile(path)) + assert.False(t, c.Fs().IsFile(dir)) + assert.False(t, c.Fs().IsFile("")) +} + +func TestFs_Exists_Good(t *testing.T) { + dir := t.TempDir() + c := New().Value.(*Core) + path := filepath.Join(dir, "exists.txt") + c.Fs().Write(path, "yes") + 
assert.True(t, c.Fs().Exists(path)) + assert.True(t, c.Fs().Exists(dir)) + assert.False(t, c.Fs().Exists(filepath.Join(dir, "nope"))) +} + +func TestFs_List_Good(t *testing.T) { + dir := t.TempDir() + c := New().Value.(*Core) + c.Fs().Write(filepath.Join(dir, "a.txt"), "a") + c.Fs().Write(filepath.Join(dir, "b.txt"), "b") + r := c.Fs().List(dir) + assert.True(t, r.OK) + assert.Len(t, r.Value.([]fs.DirEntry), 2) +} + +func TestFs_Stat_Good(t *testing.T) { + dir := t.TempDir() + c := New().Value.(*Core) + path := filepath.Join(dir, "stat.txt") + c.Fs().Write(path, "data") + r := c.Fs().Stat(path) + assert.True(t, r.OK) + assert.Equal(t, "stat.txt", r.Value.(os.FileInfo).Name()) +} + +func TestFs_Open_Good(t *testing.T) { + dir := t.TempDir() + c := New().Value.(*Core) + path := filepath.Join(dir, "open.txt") + c.Fs().Write(path, "content") + r := c.Fs().Open(path) + assert.True(t, r.OK) + r.Value.(io.Closer).Close() +} + +func TestFs_Create_Good(t *testing.T) { + dir := t.TempDir() + c := New().Value.(*Core) + path := filepath.Join(dir, "sub", "created.txt") + r := c.Fs().Create(path) + assert.True(t, r.OK) + w := r.Value.(io.WriteCloser) + w.Write([]byte("hello")) + w.Close() + rr := c.Fs().Read(path) + assert.Equal(t, "hello", rr.Value.(string)) +} + +func TestFs_Append_Good(t *testing.T) { + dir := t.TempDir() + c := New().Value.(*Core) + path := filepath.Join(dir, "append.txt") + c.Fs().Write(path, "first") + r := c.Fs().Append(path) + assert.True(t, r.OK) + w := r.Value.(io.WriteCloser) + w.Write([]byte(" second")) + w.Close() + rr := c.Fs().Read(path) + assert.Equal(t, "first second", rr.Value.(string)) +} + +func TestFs_ReadStream_Good(t *testing.T) { + dir := t.TempDir() + c := New().Value.(*Core) + path := filepath.Join(dir, "stream.txt") + c.Fs().Write(path, "streamed") + r := c.Fs().ReadStream(path) + assert.True(t, r.OK) + r.Value.(io.Closer).Close() +} + +func TestFs_WriteStream_Good(t *testing.T) { + dir := t.TempDir() + c := New().Value.(*Core) + path 
:= filepath.Join(dir, "sub", "ws.txt") + r := c.Fs().WriteStream(path) + assert.True(t, r.OK) + w := r.Value.(io.WriteCloser) + w.Write([]byte("stream")) + w.Close() +} + +func TestFs_Delete_Good(t *testing.T) { + dir := t.TempDir() + c := New().Value.(*Core) + path := filepath.Join(dir, "delete.txt") + c.Fs().Write(path, "gone") + assert.True(t, c.Fs().Delete(path).OK) + assert.False(t, c.Fs().Exists(path)) +} + +func TestFs_DeleteAll_Good(t *testing.T) { + dir := t.TempDir() + c := New().Value.(*Core) + sub := filepath.Join(dir, "deep", "nested") + c.Fs().EnsureDir(sub) + c.Fs().Write(filepath.Join(sub, "file.txt"), "data") + assert.True(t, c.Fs().DeleteAll(filepath.Join(dir, "deep")).OK) + assert.False(t, c.Fs().Exists(filepath.Join(dir, "deep"))) +} + +func TestFs_Rename_Good(t *testing.T) { + dir := t.TempDir() + c := New().Value.(*Core) + old := filepath.Join(dir, "old.txt") + nw := filepath.Join(dir, "new.txt") + c.Fs().Write(old, "data") + assert.True(t, c.Fs().Rename(old, nw).OK) + assert.False(t, c.Fs().Exists(old)) + assert.True(t, c.Fs().Exists(nw)) +} + +func TestFs_WriteMode_Good(t *testing.T) { + dir := t.TempDir() + c := New().Value.(*Core) + path := filepath.Join(dir, "secret.txt") + assert.True(t, c.Fs().WriteMode(path, "secret", 0600).OK) + r := c.Fs().Stat(path) + assert.True(t, r.OK) + assert.Equal(t, "secret.txt", r.Value.(os.FileInfo).Name()) +} + +// --- Zero Value --- + +func TestFs_ZeroValue_Good(t *testing.T) { + dir := t.TempDir() + zeroFs := &Fs{} + + path := filepath.Join(dir, "zero.txt") + assert.True(t, zeroFs.Write(path, "zero value works").OK) + r := zeroFs.Read(path) + assert.True(t, r.OK) + assert.Equal(t, "zero value works", r.Value.(string)) + assert.True(t, zeroFs.IsFile(path)) + assert.True(t, zeroFs.Exists(path)) + assert.True(t, zeroFs.IsDir(dir)) +} + +func TestFs_ZeroValue_List_Good(t *testing.T) { + dir := t.TempDir() + zeroFs := &Fs{} + + os.WriteFile(filepath.Join(dir, "a.txt"), []byte("a"), 0644) + r := 
zeroFs.List(dir) + assert.True(t, r.OK) + entries := r.Value.([]fs.DirEntry) + assert.Len(t, entries, 1) +} + +func TestFs_Exists_NotFound_Bad(t *testing.T) { + c := New().Value.(*Core) + assert.False(t, c.Fs().Exists("/nonexistent/path/xyz")) +} + +// --- Fs path/validatePath edge cases --- + +func TestFs_Read_EmptyPath_Ugly(t *testing.T) { + c := New().Value.(*Core) + r := c.Fs().Read("") + assert.False(t, r.OK) +} + +func TestFs_Write_EmptyPath_Ugly(t *testing.T) { + c := New().Value.(*Core) + r := c.Fs().Write("", "data") + assert.False(t, r.OK) +} + +func TestFs_Delete_Protected_Ugly(t *testing.T) { + c := New().Value.(*Core) + r := c.Fs().Delete("/") + assert.False(t, r.OK) +} + +func TestFs_DeleteAll_Protected_Ugly(t *testing.T) { + c := New().Value.(*Core) + r := c.Fs().DeleteAll("/") + assert.False(t, r.OK) +} + +func TestFs_ReadStream_WriteStream_Good(t *testing.T) { + dir := t.TempDir() + c := New().Value.(*Core) + path := filepath.Join(dir, "stream.txt") + c.Fs().Write(path, "streamed") + + r := c.Fs().ReadStream(path) + assert.True(t, r.OK) + + w := c.Fs().WriteStream(path) + assert.True(t, w.OK) +} diff --git a/go.mod b/go.mod index 1335493..e4be908 100644 --- a/go.mod +++ b/go.mod @@ -1,68 +1,14 @@ -module github.com/host-uk/core +module dappco.re/go/core -go 1.25.5 +go 1.26.0 + +require github.com/stretchr/testify v1.11.1 require ( - github.com/Snider/Borg v0.1.0 - github.com/getkin/kin-openapi v0.133.0 - github.com/leaanthony/debme v1.2.1 - github.com/leaanthony/gosod v1.0.4 - github.com/minio/selfupdate v0.6.0 - github.com/modelcontextprotocol/go-sdk v1.2.0 - github.com/oasdiff/oasdiff v1.11.8 - github.com/spf13/cobra v1.10.2 - github.com/stretchr/testify v1.11.1 - golang.org/x/mod v0.31.0 - golang.org/x/net v0.49.0 - golang.org/x/oauth2 v0.34.0 - golang.org/x/term v0.39.0 - golang.org/x/text v0.33.0 - gopkg.in/yaml.v3 v3.0.1 -) - -require ( - aead.dev/minisign v0.2.0 // indirect - cloud.google.com/go v0.123.0 // indirect - dario.cat/mergo v1.0.0 
// indirect - github.com/Microsoft/go-winio v0.6.2 // indirect - github.com/ProtonMail/go-crypto v1.3.0 // indirect - github.com/TwiN/go-color v1.4.1 // indirect - github.com/cloudflare/circl v1.6.1 // indirect - github.com/cyphar/filepath-securejoin v0.4.1 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect - github.com/emirpasic/gods v1.18.1 // indirect - github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect - github.com/go-git/go-billy/v5 v5.6.2 // indirect - github.com/go-git/go-git/v5 v5.16.3 // indirect - github.com/go-openapi/jsonpointer v0.21.0 // indirect - github.com/go-openapi/swag v0.23.0 // indirect - github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect - github.com/google/jsonschema-go v0.3.0 // indirect - github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect - github.com/josharian/intern v1.0.0 // indirect - github.com/kevinburke/ssh_config v1.2.0 // indirect - github.com/mailru/easyjson v0.9.0 // indirect - github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect - github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 // indirect - github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 // indirect - github.com/perimeterx/marshmallow v1.1.5 // indirect - github.com/pjbgf/sha1cd v0.3.2 // indirect + github.com/kr/pretty v0.3.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect - github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect - github.com/skeema/knownhosts v1.3.1 // indirect - github.com/spf13/pflag v1.0.10 // indirect - github.com/tidwall/gjson v1.18.0 // indirect - github.com/tidwall/match v1.1.1 // indirect - github.com/tidwall/pretty v1.2.1 // indirect - github.com/tidwall/sjson v1.2.5 // indirect - github.com/wI2L/jsondiff v0.7.0 // indirect - github.com/woodsbury/decimal128 v1.3.0 // indirect - 
github.com/xanzy/ssh-agent v0.3.3 // indirect - github.com/yargevad/filepathx v1.0.0 // indirect - github.com/yosida95/uritemplate/v3 v3.0.2 // indirect - golang.org/x/crypto v0.47.0 // indirect - golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 // indirect - golang.org/x/sys v0.40.0 // indirect - gopkg.in/warnings.v0 v0.1.2 // indirect + github.com/rogpeppe/go-internal v1.14.1 // indirect + gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 8cb0114..5a10c39 100644 --- a/go.sum +++ b/go.sum @@ -1,196 +1,23 @@ -aead.dev/minisign v0.2.0 h1:kAWrq/hBRu4AARY6AlciO83xhNnW9UaC8YipS2uhLPk= -aead.dev/minisign v0.2.0/go.mod h1:zdq6LdSd9TbuSxchxwhpA9zEb9YXcVGoE8JakuiGaIQ= -cloud.google.com/go v0.123.0 h1:2NAUJwPR47q+E35uaJeYoNhuNEM9kM8SjgRgdeOJUSE= -cloud.google.com/go v0.123.0/go.mod h1:xBoMV08QcqUGuPW65Qfm1o9Y4zKZBpGS+7bImXLTAZU= -dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= -dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= -github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= -github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= -github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= -github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBiRGFrw= -github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE= -github.com/Snider/Borg v0.1.0 h1:tLvrytPMIM2To0xByYP+KHLcT9pg9P9y9uRTyG6r9oc= -github.com/Snider/Borg v0.1.0/go.mod h1:0GMzdXYzdFZpR25IFne7ErqV/YFQHsX1THm1BbncMPo= -github.com/TwiN/go-color v1.4.1 h1:mqG0P/KBgHKVqmtL5ye7K0/Gr4l6hTksPgTgMk3mUzc= -github.com/TwiN/go-color v1.4.1/go.mod h1:WcPf/jtiW95WBIsEeY1Lc/b8aaWoiqQpu5cf8WFxu+s= -github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= 
-github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= -github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= -github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= -github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0= -github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= -github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= -github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s= -github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o= -github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE= -github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= -github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= -github.com/getkin/kin-openapi v0.133.0 h1:pJdmNohVIJ97r4AUFtEXRXwESr8b0bD721u/Tz6k8PQ= -github.com/getkin/kin-openapi v0.133.0/go.mod h1:boAciF6cXk5FhPqe/NQeBTeenbjqU4LhWBf09ILVvWE= -github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c= -github.com/gliderlabs/ssh v0.3.8/go.mod 
h1:xYoytBv1sV0aL3CavoDuJIQNURXkkfPA/wxQ1pL1fAU= -github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= -github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= -github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM= -github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU= -github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4= -github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII= -github.com/go-git/go-git/v5 v5.16.3 h1:Z8BtvxZ09bYm/yYNgPKCzgWtaRqDTgIKRgIRHBfU6Z8= -github.com/go-git/go-git/v5 v5.16.3/go.mod h1:4Ge4alE/5gPs30F2H1esi2gPd69R0C39lolkucHBOp8= -github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= -github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= -github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= -github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= -github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM= -github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= -github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8= -github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= -github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= -github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= -github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= -github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= 
-github.com/google/jsonschema-go v0.3.0 h1:6AH2TxVNtk3IlvkkhjrtbUc4S8AvO0Xii0DxIygDg+Q= -github.com/google/jsonschema-go v0.3.0/go.mod h1:r5quNTdLOYEz95Ru18zA0ydNbBuYoo9tgaYcxEYhJVE= -github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= -github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= -github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= -github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= -github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= -github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= -github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= -github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/leaanthony/debme v1.2.1 h1:9Tgwf+kjcrbMQ4WnPcEIUcQuIZYqdWftzZkBr+i/oOc= -github.com/leaanthony/debme v1.2.1/go.mod h1:3V+sCm5tYAgQymvSOfYQ5Xx2JCr+OXiD9Jkw3otUjiA= -github.com/leaanthony/gosod v1.0.4 h1:YLAbVyd591MRffDgxUOU1NwLhT9T1/YiwjKZpkNFeaI= -github.com/leaanthony/gosod v1.0.4/go.mod h1:GKuIL0zzPj3O1SdWQOdgURSuhkF+Urizzxh26t9f1cw= -github.com/leaanthony/slicer 
v1.5.0/go.mod h1:FwrApmf8gOrpzEWM2J/9Lh79tyq8KTX5AzRtwV7m4AY= -github.com/leaanthony/slicer v1.6.0 h1:1RFP5uiPJvT93TAHi+ipd3NACobkW53yUiBqZheE/Js= -github.com/leaanthony/slicer v1.6.0/go.mod h1:o/Iz29g7LN0GqH3aMjWAe90381nyZlDNquK+mtH2Fj8= -github.com/mailru/easyjson v0.9.0 h1:PrnmzHw7262yW8sTBwxi1PdJA3Iw/EKBa8psRf7d9a4= -github.com/mailru/easyjson v0.9.0/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU= -github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= -github.com/matryer/is v1.4.1 h1:55ehd8zaGABKLXQUe2awZ99BD/PTc2ls+KV/dXphgEQ= -github.com/matryer/is v1.4.1/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= -github.com/minio/selfupdate v0.6.0 h1:i76PgT0K5xO9+hjzKcacQtO7+MjJ4JKA8Ak8XQ9DDwU= -github.com/minio/selfupdate v0.6.0/go.mod h1:bO02GTIPCMQFTEvE5h4DjYB58bCoZ35XLeBf0buTDdM= -github.com/modelcontextprotocol/go-sdk v1.2.0 h1:Y23co09300CEk8iZ/tMxIX1dVmKZkzoSBZOpJwUnc/s= -github.com/modelcontextprotocol/go-sdk v1.2.0/go.mod h1:6fM3LCm3yV7pAs8isnKLn07oKtB0MP9LHd3DfAcKw10= -github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= -github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= -github.com/oasdiff/oasdiff v1.11.8 h1:3LalSR0yYVM5sAYNInlIG4TVckLCJBkgjcnst2GKWVg= -github.com/oasdiff/oasdiff v1.11.8/go.mod h1:YtP/1VnQo8FCdSWGJ11a98HFgLnFvUffH//FTDuEpls= -github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 h1:G7ERwszslrBzRxj//JalHPu/3yz+De2J+4aLtSRlHiY= -github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037/go.mod h1:2bpvgLBZEtENV5scfDFEtB/5+1M4hkQhDQrccEJ/qGw= -github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 h1:bQx3WeLcUWy+RletIKwUIt4x3t8n2SxavmoclizMb8c= -github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90/go.mod h1:y5+oSEHCPT/DGrS++Wc/479ERge0zTFxaF8PbGKcg2o= -github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k= -github.com/onsi/gomega 
v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY= -github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s= -github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw= -github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4= -github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A= -github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= -github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= -github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= -github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8= -github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY= -github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU= -github.com/spf13/cobra 
v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4= -github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= -github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= -github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= -github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= -github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= -github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= -github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= -github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= -github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= -github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= -github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= -github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= -github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= -github.com/wI2L/jsondiff v0.7.0 h1:1lH1G37GhBPqCfp/lrs91rf/2j3DktX6qYAKZkLuCQQ= -github.com/wI2L/jsondiff v0.7.0/go.mod h1:KAEIojdQq66oJiHhDyQez2x+sRit0vIzC9KeK0yizxM= -github.com/woodsbury/decimal128 
v1.3.0 h1:8pffMNWIlC0O5vbyHWFZAt5yWvWcrHA+3ovIIjVWss0= -github.com/woodsbury/decimal128 v1.3.0/go.mod h1:C5UTmyTjW3JftjUFzOVhC20BEQa2a4ZKOB5I6Zjb+ds= -github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= -github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= -github.com/yargevad/filepathx v1.0.0 h1:SYcT+N3tYGi+NvazubCNlvgIPbzAk7i7y2dwg3I5FYc= -github.com/yargevad/filepathx v1.0.0/go.mod h1:BprfX/gpYNJHJfc35GjRRpVcwWXS89gGulUIU5tK3tA= -github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4= -github.com/yosida95/uritemplate/v3 v3.0.2/go.mod h1:ILOh0sOhIJR3+L/8afwt/kE++YT040gmv5BQTMR2HP4= -go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= -golang.org/x/crypto v0.0.0-20211209193657-4570a0811e8b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= -golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= -golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw= -golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM= -golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI= -golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.49.0 
h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o= -golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8= -golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw= -golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210228012217-479acdf4ea46/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= -golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= -golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY= -golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= -golang.org/x/text v0.33.0/go.mod 
h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA= -golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= -gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/i18n.go b/i18n.go new file mode 100644 index 0000000..7061ce8 --- /dev/null +++ b/i18n.go @@ -0,0 +1,138 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// Internationalisation for the Core framework. +// I18n collects locale mounts from services and delegates +// translation to a registered Translator implementation (e.g., go-i18n). + +package core + +import ( + "sync" +) + +// Translator defines the interface for translation services. +// Implemented by go-i18n's Srv. +type Translator interface { + // Translate translates a message by its ID with optional arguments. + Translate(messageID string, args ...any) Result + // SetLanguage sets the active language (BCP47 tag, e.g., "en-GB", "de"). 
+ SetLanguage(lang string) error + // Language returns the current language code. + Language() string + // AvailableLanguages returns all loaded language codes. + AvailableLanguages() []string +} + +// LocaleProvider is implemented by services that ship their own translation files. +// Core discovers this interface during service registration and collects the +// locale mounts. The i18n service loads them during startup. +// +// Usage in a service package: +// +// //go:embed locales +// var localeFS embed.FS +// +// func (s *MyService) Locales() *Embed { +// m, _ := Mount(localeFS, "locales") +// return m +// } +type LocaleProvider interface { + Locales() *Embed +} + +// I18n manages locale collection and translation dispatch. +type I18n struct { + mu sync.RWMutex + locales []*Embed // collected from LocaleProvider services + locale string + translator Translator // registered implementation (nil until set) +} + +// AddLocales adds locale mounts (called during service registration). +func (i *I18n) AddLocales(mounts ...*Embed) { + i.mu.Lock() + i.locales = append(i.locales, mounts...) + i.mu.Unlock() +} + +// Locales returns all collected locale mounts. +func (i *I18n) Locales() Result { + i.mu.RLock() + out := make([]*Embed, len(i.locales)) + copy(out, i.locales) + i.mu.RUnlock() + return Result{out, true} +} + +// SetTranslator registers the translation implementation. +// Called by go-i18n's Srv during startup. +func (i *I18n) SetTranslator(t Translator) { + i.mu.Lock() + i.translator = t + locale := i.locale + i.mu.Unlock() + if t != nil && locale != "" { + _ = t.SetLanguage(locale) + } +} + +// Translator returns the registered translation implementation, or nil. +func (i *I18n) Translator() Result { + i.mu.RLock() + t := i.translator + i.mu.RUnlock() + if t == nil { + return Result{} + } + return Result{t, true} +} + +// Translate translates a message. Returns the key as-is if no translator is registered. 
+func (i *I18n) Translate(messageID string, args ...any) Result { + i.mu.RLock() + t := i.translator + i.mu.RUnlock() + if t != nil { + return t.Translate(messageID, args...) + } + return Result{messageID, true} +} + +// SetLanguage sets the active language and forwards to the translator if registered. +func (i *I18n) SetLanguage(lang string) Result { + if lang == "" { + return Result{OK: true} + } + i.mu.Lock() + i.locale = lang + t := i.translator + i.mu.Unlock() + if t != nil { + if err := t.SetLanguage(lang); err != nil { + return Result{err, false} + } + } + return Result{OK: true} +} + +// Language returns the current language code, or "en" if not set. +func (i *I18n) Language() string { + i.mu.RLock() + locale := i.locale + i.mu.RUnlock() + if locale != "" { + return locale + } + return "en" +} + +// AvailableLanguages returns all loaded language codes. +func (i *I18n) AvailableLanguages() []string { + i.mu.RLock() + t := i.translator + i.mu.RUnlock() + if t != nil { + return t.AvailableLanguages() + } + return []string{"en"} +} diff --git a/i18n_test.go b/i18n_test.go new file mode 100644 index 0000000..3e6d8ca --- /dev/null +++ b/i18n_test.go @@ -0,0 +1,96 @@ +package core_test + +import ( + "testing" + + . 
"dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- I18n --- + +func TestI18n_Good(t *testing.T) { + c := New().Value.(*Core) + assert.NotNil(t, c.I18n()) +} + +func TestI18n_AddLocales_Good(t *testing.T) { + c := New().Value.(*Core) + r := c.Data().New(Options{ + {Key: "name", Value: "lang"}, + {Key: "source", Value: testFS}, + {Key: "path", Value: "testdata"}, + }) + if r.OK { + c.I18n().AddLocales(r.Value.(*Embed)) + } + r2 := c.I18n().Locales() + assert.True(t, r2.OK) + assert.Len(t, r2.Value.([]*Embed), 1) +} + +func TestI18n_Locales_Empty_Good(t *testing.T) { + c := New().Value.(*Core) + r := c.I18n().Locales() + assert.True(t, r.OK) + assert.Empty(t, r.Value.([]*Embed)) +} + +// --- Translator (no translator registered) --- + +func TestI18n_Translate_NoTranslator_Good(t *testing.T) { + c := New().Value.(*Core) + // Without a translator, Translate returns the key as-is + r := c.I18n().Translate("greeting.hello") + assert.True(t, r.OK) + assert.Equal(t, "greeting.hello", r.Value) +} + +func TestI18n_SetLanguage_NoTranslator_Good(t *testing.T) { + c := New().Value.(*Core) + r := c.I18n().SetLanguage("de") + assert.True(t, r.OK) // no-op without translator +} + +func TestI18n_Language_NoTranslator_Good(t *testing.T) { + c := New().Value.(*Core) + assert.Equal(t, "en", c.I18n().Language()) +} + +func TestI18n_AvailableLanguages_NoTranslator_Good(t *testing.T) { + c := New().Value.(*Core) + langs := c.I18n().AvailableLanguages() + assert.Equal(t, []string{"en"}, langs) +} + +func TestI18n_Translator_Nil_Good(t *testing.T) { + c := New().Value.(*Core) + assert.False(t, c.I18n().Translator().OK) +} + +// --- Translator (with mock) --- + +type mockTranslator struct { + lang string +} + +func (m *mockTranslator) Translate(id string, args ...any) Result { + return Result{"translated:" + id, true} +} +func (m *mockTranslator) SetLanguage(lang string) error { m.lang = lang; return nil } +func (m *mockTranslator) Language() string { return m.lang } 
+func (m *mockTranslator) AvailableLanguages() []string { return []string{"en", "de", "fr"} } + +func TestI18n_WithTranslator_Good(t *testing.T) { + c := New().Value.(*Core) + tr := &mockTranslator{lang: "en"} + c.I18n().SetTranslator(tr) + + assert.Equal(t, tr, c.I18n().Translator().Value) + assert.Equal(t, "translated:hello", c.I18n().Translate("hello").Value) + assert.Equal(t, "en", c.I18n().Language()) + assert.Equal(t, []string{"en", "de", "fr"}, c.I18n().AvailableLanguages()) + + c.I18n().SetLanguage("de") + assert.Equal(t, "de", c.I18n().Language()) +} diff --git a/info.go b/info.go new file mode 100644 index 0000000..1a4ae43 --- /dev/null +++ b/info.go @@ -0,0 +1,134 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// System information registry for the Core framework. +// Read-only key-value store of environment facts, populated once at init. +// Env is environment. Config is ours. +// +// System keys: +// +// core.Env("OS") // "darwin" +// core.Env("ARCH") // "arm64" +// core.Env("GO") // "go1.26" +// core.Env("DS") // "/" (directory separator) +// core.Env("PS") // ":" (path list separator) +// core.Env("HOSTNAME") // "cladius" +// core.Env("USER") // "snider" +// core.Env("PID") // "12345" +// core.Env("NUM_CPU") // "10" +// +// Directory keys: +// +// core.Env("DIR_HOME") // "/Users/snider" +// core.Env("DIR_CONFIG") // "~/Library/Application Support" +// core.Env("DIR_CACHE") // "~/Library/Caches" +// core.Env("DIR_DATA") // "~/Library" (platform-specific) +// core.Env("DIR_TMP") // "/tmp" +// core.Env("DIR_CWD") // current working directory +// core.Env("DIR_DOWNLOADS") // "~/Downloads" +// core.Env("DIR_CODE") // "~/Code" +// +// Timestamp keys: +// +// core.Env("CORE_START") // "2026-03-22T14:30:00Z" +package core + +import ( + "os" + "runtime" + "strconv" + "time" +) + +// SysInfo holds read-only system information, populated once at init. 
+type SysInfo struct { + values map[string]string +} + +// systemInfo is declared empty — populated in init() so Path() can be used +// without creating an init cycle. +var systemInfo = &SysInfo{values: make(map[string]string)} + +func init() { + i := systemInfo + + // System + i.values["OS"] = runtime.GOOS + i.values["ARCH"] = runtime.GOARCH + i.values["GO"] = runtime.Version() + i.values["DS"] = string(os.PathSeparator) + i.values["PS"] = string(os.PathListSeparator) + i.values["PID"] = strconv.Itoa(os.Getpid()) + i.values["NUM_CPU"] = strconv.Itoa(runtime.NumCPU()) + i.values["USER"] = Username() + + if h, err := os.Hostname(); err == nil { + i.values["HOSTNAME"] = h + } + + // Directories — DS and DIR_HOME set first so Path() can use them. + // CORE_HOME overrides os.UserHomeDir() (e.g., agent workspaces). + if d := os.Getenv("CORE_HOME"); d != "" { + i.values["DIR_HOME"] = d + } else if d, err := os.UserHomeDir(); err == nil { + i.values["DIR_HOME"] = d + } + + // Derived directories via Path() — single point of responsibility + i.values["DIR_DOWNLOADS"] = Path("Downloads") + i.values["DIR_CODE"] = Path("Code") + if d, err := os.UserConfigDir(); err == nil { + i.values["DIR_CONFIG"] = d + } + if d, err := os.UserCacheDir(); err == nil { + i.values["DIR_CACHE"] = d + } + i.values["DIR_TMP"] = os.TempDir() + if d, err := os.Getwd(); err == nil { + i.values["DIR_CWD"] = d + } + + // Platform-specific data directory + switch runtime.GOOS { + case "darwin": + i.values["DIR_DATA"] = Path(Env("DIR_HOME"), "Library") + case "windows": + if d := os.Getenv("LOCALAPPDATA"); d != "" { + i.values["DIR_DATA"] = d + } + default: + if xdg := os.Getenv("XDG_DATA_HOME"); xdg != "" { + i.values["DIR_DATA"] = xdg + } else if Env("DIR_HOME") != "" { + i.values["DIR_DATA"] = Path(Env("DIR_HOME"), ".local", "share") + } + } + + // Timestamps + i.values["CORE_START"] = time.Now().UTC().Format(time.RFC3339) +} + +// Env returns a system information value by key. 
+// Core keys (OS, DIR_HOME, DS, etc.) are pre-populated at init. +// Unknown keys fall through to os.Getenv — making Env a universal +// replacement for os.Getenv. +// +// core.Env("OS") // "darwin" (pre-populated) +// core.Env("DIR_HOME") // "/Users/snider" (pre-populated) +// core.Env("FORGE_TOKEN") // falls through to os.Getenv +func Env(key string) string { + if v := systemInfo.values[key]; v != "" { + return v + } + return os.Getenv(key) +} + +// EnvKeys returns all available environment keys. +// +// keys := core.EnvKeys() +func EnvKeys() []string { + keys := make([]string, 0, len(systemInfo.values)) + for k := range systemInfo.values { + keys = append(keys, k) + } + return keys +} diff --git a/info_test.go b/info_test.go new file mode 100644 index 0000000..2a03369 --- /dev/null +++ b/info_test.go @@ -0,0 +1,103 @@ +// SPDX-License-Identifier: EUPL-1.2 + +package core_test + +import ( + "os" + "runtime" + "testing" + "time" + + core "dappco.re/go/core" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestEnv_OS(t *testing.T) { + assert.Equal(t, runtime.GOOS, core.Env("OS")) +} + +func TestEnv_ARCH(t *testing.T) { + assert.Equal(t, runtime.GOARCH, core.Env("ARCH")) +} + +func TestEnv_GO(t *testing.T) { + assert.Equal(t, runtime.Version(), core.Env("GO")) +} + +func TestEnv_DS(t *testing.T) { + assert.Equal(t, string(os.PathSeparator), core.Env("DS")) +} + +func TestEnv_PS(t *testing.T) { + assert.Equal(t, string(os.PathListSeparator), core.Env("PS")) +} + +func TestEnv_DIR_HOME(t *testing.T) { + if ch := os.Getenv("CORE_HOME"); ch != "" { + assert.Equal(t, ch, core.Env("DIR_HOME")) + return + } + home, err := os.UserHomeDir() + require.NoError(t, err) + assert.Equal(t, home, core.Env("DIR_HOME")) +} + +func TestEnv_DIR_TMP(t *testing.T) { + assert.Equal(t, os.TempDir(), core.Env("DIR_TMP")) +} + +func TestEnv_DIR_CONFIG(t *testing.T) { + cfg, err := os.UserConfigDir() + require.NoError(t, err) + assert.Equal(t, cfg, 
core.Env("DIR_CONFIG")) +} + +func TestEnv_DIR_CACHE(t *testing.T) { + cache, err := os.UserCacheDir() + require.NoError(t, err) + assert.Equal(t, cache, core.Env("DIR_CACHE")) +} + +func TestEnv_HOSTNAME(t *testing.T) { + hostname, err := os.Hostname() + require.NoError(t, err) + assert.Equal(t, hostname, core.Env("HOSTNAME")) +} + +func TestEnv_USER(t *testing.T) { + assert.NotEmpty(t, core.Env("USER")) +} + +func TestEnv_PID(t *testing.T) { + assert.NotEmpty(t, core.Env("PID")) +} + +func TestEnv_NUM_CPU(t *testing.T) { + assert.NotEmpty(t, core.Env("NUM_CPU")) +} + +func TestEnv_CORE_START(t *testing.T) { + ts := core.Env("CORE_START") + require.NotEmpty(t, ts) + _, err := time.Parse(time.RFC3339, ts) + assert.NoError(t, err, "CORE_START should be valid RFC3339") +} + +func TestEnv_Unknown(t *testing.T) { + assert.Equal(t, "", core.Env("NOPE")) +} + +func TestEnv_CoreInstance(t *testing.T) { + c := core.New().Value.(*core.Core) + assert.Equal(t, core.Env("OS"), c.Env("OS")) + assert.Equal(t, core.Env("DIR_HOME"), c.Env("DIR_HOME")) +} + +func TestEnvKeys(t *testing.T) { + keys := core.EnvKeys() + assert.NotEmpty(t, keys) + assert.Contains(t, keys, "OS") + assert.Contains(t, keys, "DIR_HOME") + assert.Contains(t, keys, "CORE_START") +} diff --git a/internal/tools/i18n-validate/main.go b/internal/tools/i18n-validate/main.go deleted file mode 100644 index 817759e..0000000 --- a/internal/tools/i18n-validate/main.go +++ /dev/null @@ -1,524 +0,0 @@ -// Command i18n-validate scans Go source files for i18n key usage and validates -// them against the locale JSON files. -// -// Usage: -// -// go run ./cmd/i18n-validate ./... -// go run ./cmd/i18n-validate ./pkg/cli ./cmd/dev -// -// The validator checks: -// - T("key") calls - validates key exists in locale files -// - C("intent", ...) calls - validates intent exists in registered intents -// - i18n.T("key") and i18n.C("intent", ...) 
qualified calls -// -// Exit codes: -// - 0: All keys valid -// - 1: Missing keys found -// - 2: Error during validation -package main - -import ( - "encoding/json" - "fmt" - "go/ast" - "go/parser" - "go/token" - "os" - "path/filepath" - "sort" - "strings" -) - -// KeyUsage records where a key is used in the source code. -type KeyUsage struct { - Key string - File string - Line int - Function string // "T" or "C" -} - -// ValidationResult holds the results of validation. -type ValidationResult struct { - TotalKeys int - ValidKeys int - MissingKeys []KeyUsage - IntentKeys int - MessageKeys int -} - -func main() { - if len(os.Args) < 2 { - fmt.Fprintln(os.Stderr, "Usage: i18n-validate ") - fmt.Fprintln(os.Stderr, "Example: i18n-validate ./...") - os.Exit(2) - } - - // Find the project root (where locales are) - root, err := findProjectRoot() - if err != nil { - fmt.Fprintf(os.Stderr, "Error finding project root: %v\n", err) - os.Exit(2) - } - - // Load valid keys from locale files - validKeys, err := loadValidKeys(filepath.Join(root, "pkg/i18n/locales")) - if err != nil { - fmt.Fprintf(os.Stderr, "Error loading locale files: %v\n", err) - os.Exit(2) - } - - // Load valid intents - validIntents := loadValidIntents() - - // Scan source files - usages, err := scanPackages(os.Args[1:]) - if err != nil { - fmt.Fprintf(os.Stderr, "Error scanning packages: %v\n", err) - os.Exit(2) - } - - // Validate - result := validate(usages, validKeys, validIntents) - - // Report - printReport(result) - - if len(result.MissingKeys) > 0 { - os.Exit(1) - } -} - -// findProjectRoot finds the project root by looking for go.mod. 
-func findProjectRoot() (string, error) { - dir, err := os.Getwd() - if err != nil { - return "", err - } - - for { - if _, err := os.Stat(filepath.Join(dir, "go.mod")); err == nil { - return dir, nil - } - parent := filepath.Dir(dir) - if parent == dir { - return "", fmt.Errorf("could not find go.mod in any parent directory") - } - dir = parent - } -} - -// loadValidKeys loads all valid keys from locale JSON files. -func loadValidKeys(localesDir string) (map[string]bool, error) { - keys := make(map[string]bool) - - entries, err := os.ReadDir(localesDir) - if err != nil { - return nil, fmt.Errorf("reading locales dir: %w", err) - } - - for _, entry := range entries { - if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".json") { - continue - } - - data, err := os.ReadFile(filepath.Join(localesDir, entry.Name())) - if err != nil { - return nil, fmt.Errorf("reading %s: %w", entry.Name(), err) - } - - var raw map[string]any - if err := json.Unmarshal(data, &raw); err != nil { - return nil, fmt.Errorf("parsing %s: %w", entry.Name(), err) - } - - extractKeys("", raw, keys) - } - - return keys, nil -} - -// extractKeys recursively extracts flattened keys from nested JSON. -func extractKeys(prefix string, data map[string]any, out map[string]bool) { - for key, value := range data { - fullKey := key - if prefix != "" { - fullKey = prefix + "." + key - } - - switch v := value.(type) { - case string: - out[fullKey] = true - case map[string]any: - // Check if it's a plural/verb/noun object (has specific keys) - if isPluralOrGrammarObject(v) { - out[fullKey] = true - } else { - extractKeys(fullKey, v, out) - } - } - } -} - -// isPluralOrGrammarObject checks if a map is a leaf object (plural forms, verb forms, etc). 
-func isPluralOrGrammarObject(m map[string]any) bool { - // CLDR plural keys - _, hasOne := m["one"] - _, hasOther := m["other"] - _, hasZero := m["zero"] - _, hasTwo := m["two"] - _, hasFew := m["few"] - _, hasMany := m["many"] - - // Grammar keys - _, hasPast := m["past"] - _, hasGerund := m["gerund"] - _, hasGender := m["gender"] - _, hasBase := m["base"] - - // Article keys - _, hasDefault := m["default"] - _, hasVowel := m["vowel"] - - if hasOne || hasOther || hasZero || hasTwo || hasFew || hasMany { - return true - } - if hasPast || hasGerund || hasGender || hasBase { - return true - } - if hasDefault || hasVowel { - return true - } - - return false -} - -// loadValidIntents returns the set of valid intent keys. -func loadValidIntents() map[string]bool { - // Core intents - these match what's defined in intents.go - return map[string]bool{ - // Destructive - "core.delete": true, - "core.remove": true, - "core.discard": true, - "core.reset": true, - "core.overwrite": true, - // Creation - "core.create": true, - "core.add": true, - "core.clone": true, - "core.copy": true, - // Modification - "core.save": true, - "core.update": true, - "core.rename": true, - "core.move": true, - // Git - "core.commit": true, - "core.push": true, - "core.pull": true, - "core.merge": true, - "core.rebase": true, - // Network - "core.install": true, - "core.download": true, - "core.upload": true, - "core.publish": true, - "core.deploy": true, - // Process - "core.start": true, - "core.stop": true, - "core.restart": true, - "core.run": true, - "core.build": true, - "core.test": true, - // Information - "core.continue": true, - "core.proceed": true, - "core.confirm": true, - // Additional - "core.sync": true, - "core.boot": true, - "core.format": true, - "core.analyse": true, - "core.link": true, - "core.unlink": true, - "core.fetch": true, - "core.generate": true, - "core.validate": true, - "core.check": true, - "core.scan": true, - } -} - -// scanPackages scans Go packages for i18n 
key usage. -func scanPackages(patterns []string) ([]KeyUsage, error) { - var usages []KeyUsage - - for _, pattern := range patterns { - // Expand pattern - matches, err := expandPattern(pattern) - if err != nil { - return nil, fmt.Errorf("expanding pattern %q: %w", pattern, err) - } - - for _, dir := range matches { - dirUsages, err := scanDirectory(dir) - if err != nil { - return nil, fmt.Errorf("scanning %s: %w", dir, err) - } - usages = append(usages, dirUsages...) - } - } - - return usages, nil -} - -// expandPattern expands a Go package pattern to directories. -func expandPattern(pattern string) ([]string, error) { - // Handle ./... or ... pattern - if strings.HasSuffix(pattern, "...") { - base := strings.TrimSuffix(pattern, "...") - base = strings.TrimSuffix(base, "/") - if base == "" || base == "." { - base = "." - } - return findAllGoDirs(base) - } - - // Single directory - return []string{pattern}, nil -} - -// findAllGoDirs finds all directories containing .go files. -func findAllGoDirs(root string) ([]string, error) { - var dirs []string - seen := make(map[string]bool) - - err := filepath.Walk(root, func(path string, info os.FileInfo, err error) error { - if err != nil { - return nil // Continue walking even on error - } - - if info == nil { - return nil - } - - // Skip vendor, testdata, and hidden directories (but not . itself) - if info.IsDir() { - name := info.Name() - if name == "vendor" || name == "testdata" || (strings.HasPrefix(name, ".") && name != ".") { - return filepath.SkipDir - } - return nil - } - - // Check for .go files - if strings.HasSuffix(path, ".go") { - dir := filepath.Dir(path) - if !seen[dir] { - seen[dir] = true - dirs = append(dirs, dir) - } - } - - return nil - }) - - return dirs, err -} - -// scanDirectory scans a directory for i18n key usage. 
-func scanDirectory(dir string) ([]KeyUsage, error) { - var usages []KeyUsage - - fset := token.NewFileSet() - // Parse all .go files except those ending exactly in _test.go - pkgs, err := parser.ParseDir(fset, dir, func(fi os.FileInfo) bool { - name := fi.Name() - // Only exclude files that are actual test files (ending in _test.go) - // Files like "go_test_cmd.go" should be included - return strings.HasSuffix(name, ".go") && !strings.HasSuffix(name, "_test.go") - }, 0) - if err != nil { - return nil, err - } - - for _, pkg := range pkgs { - for filename, file := range pkg.Files { - fileUsages := scanFile(fset, filename, file) - usages = append(usages, fileUsages...) - } - } - - return usages, nil -} - -// scanFile scans a single file for i18n key usage. -func scanFile(fset *token.FileSet, filename string, file *ast.File) []KeyUsage { - var usages []KeyUsage - - ast.Inspect(file, func(n ast.Node) bool { - call, ok := n.(*ast.CallExpr) - if !ok { - return true - } - - funcName := getFuncName(call) - if funcName == "" { - return true - } - - // Check for T(), C(), i18n.T(), i18n.C() - if funcName == "T" || funcName == "i18n.T" || funcName == "_" || funcName == "i18n._" { - if key := extractStringArg(call, 0); key != "" { - pos := fset.Position(call.Pos()) - usages = append(usages, KeyUsage{ - Key: key, - File: filename, - Line: pos.Line, - Function: "T", - }) - } - } else if funcName == "C" || funcName == "i18n.C" { - if key := extractStringArg(call, 0); key != "" { - pos := fset.Position(call.Pos()) - usages = append(usages, KeyUsage{ - Key: key, - File: filename, - Line: pos.Line, - Function: "C", - }) - } - } else if funcName == "I" || funcName == "i18n.I" { - if key := extractStringArg(call, 0); key != "" { - pos := fset.Position(call.Pos()) - usages = append(usages, KeyUsage{ - Key: key, - File: filename, - Line: pos.Line, - Function: "C", // I() is an intent builder - }) - } - } - - return true - }) - - return usages -} - -// getFuncName extracts the function 
name from a call expression. -func getFuncName(call *ast.CallExpr) string { - switch fn := call.Fun.(type) { - case *ast.Ident: - return fn.Name - case *ast.SelectorExpr: - if ident, ok := fn.X.(*ast.Ident); ok { - return ident.Name + "." + fn.Sel.Name - } - } - return "" -} - -// extractStringArg extracts a string literal from a call argument. -func extractStringArg(call *ast.CallExpr, index int) string { - if index >= len(call.Args) { - return "" - } - - arg := call.Args[index] - - // Direct string literal - if lit, ok := arg.(*ast.BasicLit); ok && lit.Kind == token.STRING { - // Remove quotes - s := lit.Value - if len(s) >= 2 { - return s[1 : len(s)-1] - } - } - - // Identifier (constant reference) - we skip these as they're type-safe - if _, ok := arg.(*ast.Ident); ok { - return "" // Skip constants like IntentCoreDelete - } - - // Selector (like i18n.IntentCoreDelete) - skip these too - if _, ok := arg.(*ast.SelectorExpr); ok { - return "" - } - - return "" -} - -// validate validates key usages against valid keys and intents. 
-func validate(usages []KeyUsage, validKeys, validIntents map[string]bool) ValidationResult { - result := ValidationResult{ - TotalKeys: len(usages), - } - - for _, usage := range usages { - if usage.Function == "C" { - result.IntentKeys++ - // Check intent keys - if validIntents[usage.Key] { - result.ValidKeys++ - } else { - // Also allow custom intents (non-core.* prefix) - if !strings.HasPrefix(usage.Key, "core.") { - result.ValidKeys++ // Assume custom intents are valid - } else { - result.MissingKeys = append(result.MissingKeys, usage) - } - } - } else { - result.MessageKeys++ - // Check message keys - if validKeys[usage.Key] { - result.ValidKeys++ - } else if strings.HasPrefix(usage.Key, "core.") { - // core.* keys used with T() are intent keys - if validIntents[usage.Key] { - result.ValidKeys++ - } else { - result.MissingKeys = append(result.MissingKeys, usage) - } - } else { - result.MissingKeys = append(result.MissingKeys, usage) - } - } - } - - return result -} - -// printReport prints the validation report. 
-func printReport(result ValidationResult) { - fmt.Printf("i18n Validation Report\n") - fmt.Printf("======================\n\n") - fmt.Printf("Total keys scanned: %d\n", result.TotalKeys) - fmt.Printf(" Message keys (T): %d\n", result.MessageKeys) - fmt.Printf(" Intent keys (C): %d\n", result.IntentKeys) - fmt.Printf("Valid keys: %d\n", result.ValidKeys) - fmt.Printf("Missing keys: %d\n", len(result.MissingKeys)) - - if len(result.MissingKeys) > 0 { - fmt.Printf("\nMissing Keys:\n") - fmt.Printf("-------------\n") - - // Sort by file then line - sort.Slice(result.MissingKeys, func(i, j int) bool { - if result.MissingKeys[i].File != result.MissingKeys[j].File { - return result.MissingKeys[i].File < result.MissingKeys[j].File - } - return result.MissingKeys[i].Line < result.MissingKeys[j].Line - }) - - for _, usage := range result.MissingKeys { - fmt.Printf(" %s:%d: %s(%q)\n", usage.File, usage.Line, usage.Function, usage.Key) - } - - fmt.Printf("\nAdd these keys to pkg/i18n/locales/en_GB.json or use constants from pkg/i18n/keys.go\n") - } else { - fmt.Printf("\nAll keys are valid!\n") - } -} diff --git a/internal/variants/ci.go b/internal/variants/ci.go deleted file mode 100644 index 313dd47..0000000 --- a/internal/variants/ci.go +++ /dev/null @@ -1,23 +0,0 @@ -//go:build ci - -// ci.go imports packages for the minimal CI/release binary. -// -// Build with: go build -tags ci -// -// This variant includes only commands needed for CI pipelines: -// - build: Cross-platform compilation -// - ci: Release publishing -// - sdk: API compatibility checks -// - doctor: Environment verification -// -// Use this build to reduce binary size and attack surface in production. 
- -package variants - -import ( - // Commands via self-registration - _ "github.com/host-uk/core/pkg/build/buildcmd" - _ "github.com/host-uk/core/pkg/ci" - _ "github.com/host-uk/core/pkg/doctor" - _ "github.com/host-uk/core/pkg/sdk" -) diff --git a/internal/variants/full.go b/internal/variants/full.go deleted file mode 100644 index 30542eb..0000000 --- a/internal/variants/full.go +++ /dev/null @@ -1,41 +0,0 @@ -//go:build !ci && !php && !minimal - -// full.go imports all packages for the full development binary. -// -// Build with: go build (default) -// -// This is the default build variant with all development tools: -// - dev: Multi-repo git workflows (commit, push, pull, sync) -// - ai: AI agent task management -// - go: Go module and build tools -// - php: Laravel/Composer development tools -// - build: Cross-platform compilation -// - ci: Release publishing -// - sdk: API compatibility checks -// - pkg: Package management -// - vm: LinuxKit VM management -// - docs: Documentation generation -// - setup: Repository cloning and setup -// - doctor: Environment health checks -// - test: Test runner with coverage - -package variants - -import ( - // Commands via self-registration - _ "github.com/host-uk/core/pkg/ai" - _ "github.com/host-uk/core/pkg/build/buildcmd" - _ "github.com/host-uk/core/pkg/ci" - _ "github.com/host-uk/core/pkg/dev" - _ "github.com/host-uk/core/pkg/docs" - _ "github.com/host-uk/core/pkg/doctor" - _ "github.com/host-uk/core/pkg/go" - _ "github.com/host-uk/core/pkg/php" - _ "github.com/host-uk/core/pkg/pkgcmd" - _ "github.com/host-uk/core/pkg/sdk" - _ "github.com/host-uk/core/pkg/security" - _ "github.com/host-uk/core/pkg/setup" - _ "github.com/host-uk/core/pkg/test" - _ "github.com/host-uk/core/pkg/vm" - _ "github.com/host-uk/core/pkg/workspace" -) diff --git a/internal/variants/minimal.go b/internal/variants/minimal.go deleted file mode 100644 index 69f4bff..0000000 --- a/internal/variants/minimal.go +++ /dev/null @@ -1,17 +0,0 @@ -//go:build 
minimal - -// minimal.go imports only core packages for a minimal binary. -// -// Build with: go build -tags minimal -// -// This variant includes only the absolute essentials: -// - doctor: Environment verification -// -// Use this for the smallest possible binary with just health checks. - -package variants - -import ( - // Commands via self-registration - _ "github.com/host-uk/core/pkg/doctor" -) diff --git a/internal/variants/php.go b/internal/variants/php.go deleted file mode 100644 index c7a574d..0000000 --- a/internal/variants/php.go +++ /dev/null @@ -1,19 +0,0 @@ -//go:build php - -// php.go imports packages for the PHP-only binary. -// -// Build with: go build -tags php -// -// This variant includes only PHP/Laravel development tools: -// - php: Laravel/Composer development tools -// - doctor: Environment verification -// -// Use this for PHP-focused workflows without other tooling. - -package variants - -import ( - // Commands via self-registration - _ "github.com/host-uk/core/pkg/doctor" - _ "github.com/host-uk/core/pkg/php" -) diff --git a/ipc.go b/ipc.go new file mode 100644 index 0000000..5f22c6f --- /dev/null +++ b/ipc.go @@ -0,0 +1,72 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// Message bus for the Core framework. +// Dispatches actions (fire-and-forget), queries (first responder), +// and tasks (first executor) between registered handlers. + +package core + +import ( + "slices" + "sync" +) + +// Ipc holds IPC dispatch data. 
+type Ipc struct {
+	ipcMu       sync.RWMutex
+	ipcHandlers []func(*Core, Message) Result
+
+	queryMu       sync.RWMutex
+	queryHandlers []QueryHandler
+
+	taskMu       sync.RWMutex
+	taskHandlers []TaskHandler
+}
+
+// Action dispatches msg to every registered action handler in order.
+// Dispatch stops at the first handler that reports failure and returns
+// that failing Result; otherwise the Result is OK.
+func (c *Core) Action(msg Message) Result {
+	c.ipc.ipcMu.RLock()
+	snapshot := slices.Clone(c.ipc.ipcHandlers)
+	c.ipc.ipcMu.RUnlock()
+
+	for _, handle := range snapshot {
+		r := handle(c, msg)
+		if !r.OK {
+			return r
+		}
+	}
+	return Result{OK: true}
+}
+
+// Query dispatches q to the registered query handlers and returns the
+// first successful Result. A failed Result is returned when no handler
+// responds.
+func (c *Core) Query(q Query) Result {
+	c.ipc.queryMu.RLock()
+	snapshot := slices.Clone(c.ipc.queryHandlers)
+	c.ipc.queryMu.RUnlock()
+
+	for _, handle := range snapshot {
+		if r := handle(c, q); r.OK {
+			return r
+		}
+	}
+	return Result{}
+}
+
+// QueryAll dispatches q to every registered query handler and collects
+// each successful non-nil response. The Result is always OK; its Value
+// is a []any of the collected responses.
+func (c *Core) QueryAll(q Query) Result {
+	c.ipc.queryMu.RLock()
+	snapshot := slices.Clone(c.ipc.queryHandlers)
+	c.ipc.queryMu.RUnlock()
+
+	var collected []any
+	for _, handle := range snapshot {
+		if r := handle(c, q); r.OK && r.Value != nil {
+			collected = append(collected, r.Value)
+		}
+	}
+	return Result{collected, true}
+}
+
+// RegisterQuery adds a query handler to the dispatch list.
+func (c *Core) RegisterQuery(handler QueryHandler) {
+	c.ipc.queryMu.Lock()
+	defer c.ipc.queryMu.Unlock()
+	c.ipc.queryHandlers = append(c.ipc.queryHandlers, handler)
+}
diff --git a/ipc_test.go b/ipc_test.go
new file mode 100644
index 0000000..005ef6a
--- /dev/null
+++ b/ipc_test.go
@@ -0,0 +1,95 @@
+package core_test
+
+import (
+	"testing"
+
+	.
"dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- IPC: Actions --- + +type testMessage struct{ payload string } + +func TestAction_Good(t *testing.T) { + c := New().Value.(*Core) + var received Message + c.RegisterAction(func(_ *Core, msg Message) Result { + received = msg + return Result{OK: true} + }) + r := c.ACTION(testMessage{payload: "hello"}) + assert.True(t, r.OK) + assert.Equal(t, testMessage{payload: "hello"}, received) +} + +func TestAction_Multiple_Good(t *testing.T) { + c := New().Value.(*Core) + count := 0 + handler := func(_ *Core, _ Message) Result { count++; return Result{OK: true} } + c.RegisterActions(handler, handler, handler) + c.ACTION(nil) + assert.Equal(t, 3, count) +} + +func TestAction_None_Good(t *testing.T) { + c := New().Value.(*Core) + // No handlers registered — should succeed + r := c.ACTION(nil) + assert.True(t, r.OK) +} + +// --- IPC: Queries --- + +func TestQuery_Good(t *testing.T) { + c := New().Value.(*Core) + c.RegisterQuery(func(_ *Core, q Query) Result { + if q == "ping" { + return Result{Value: "pong", OK: true} + } + return Result{} + }) + r := c.QUERY("ping") + assert.True(t, r.OK) + assert.Equal(t, "pong", r.Value) +} + +func TestQuery_Unhandled_Good(t *testing.T) { + c := New().Value.(*Core) + c.RegisterQuery(func(_ *Core, q Query) Result { + return Result{} + }) + r := c.QUERY("unknown") + assert.False(t, r.OK) +} + +func TestQueryAll_Good(t *testing.T) { + c := New().Value.(*Core) + c.RegisterQuery(func(_ *Core, _ Query) Result { + return Result{Value: "a", OK: true} + }) + c.RegisterQuery(func(_ *Core, _ Query) Result { + return Result{Value: "b", OK: true} + }) + r := c.QUERYALL("anything") + assert.True(t, r.OK) + results := r.Value.([]any) + assert.Len(t, results, 2) + assert.Contains(t, results, "a") + assert.Contains(t, results, "b") +} + +// --- IPC: Tasks --- + +func TestPerform_Good(t *testing.T) { + c := New().Value.(*Core) + c.RegisterTask(func(_ *Core, t Task) Result { + if t == 
"compute" { + return Result{Value: 42, OK: true} + } + return Result{} + }) + r := c.PERFORM("compute") + assert.True(t, r.OK) + assert.Equal(t, 42, r.Value) +} diff --git a/lock.go b/lock.go new file mode 100644 index 0000000..a87181d --- /dev/null +++ b/lock.go @@ -0,0 +1,89 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// Synchronisation, locking, and lifecycle snapshots for the Core framework. + +package core + +import ( + "sync" +) + +// package-level mutex infrastructure +var ( + lockMu sync.Mutex + lockMap = make(map[string]*sync.RWMutex) +) + +// Lock is the DTO for a named mutex. +type Lock struct { + Name string + Mutex *sync.RWMutex +} + +// Lock returns a named Lock, creating the mutex if needed. +func (c *Core) Lock(name string) *Lock { + lockMu.Lock() + m, ok := lockMap[name] + if !ok { + m = &sync.RWMutex{} + lockMap[name] = m + } + lockMu.Unlock() + return &Lock{Name: name, Mutex: m} +} + +// LockEnable marks that the service lock should be applied after initialisation. +func (c *Core) LockEnable(name ...string) { + n := "srv" + if len(name) > 0 { + n = name[0] + } + c.Lock(n).Mutex.Lock() + defer c.Lock(n).Mutex.Unlock() + c.services.lockEnabled = true +} + +// LockApply activates the service lock if it was enabled. +func (c *Core) LockApply(name ...string) { + n := "srv" + if len(name) > 0 { + n = name[0] + } + c.Lock(n).Mutex.Lock() + defer c.Lock(n).Mutex.Unlock() + if c.services.lockEnabled { + c.services.locked = true + } +} + +// Startables returns services that have an OnStart function. +func (c *Core) Startables() Result { + if c.services == nil { + return Result{} + } + c.Lock("srv").Mutex.RLock() + defer c.Lock("srv").Mutex.RUnlock() + var out []*Service + for _, svc := range c.services.services { + if svc.OnStart != nil { + out = append(out, svc) + } + } + return Result{out, true} +} + +// Stoppables returns services that have an OnStop function. 
+func (c *Core) Stoppables() Result { + if c.services == nil { + return Result{} + } + c.Lock("srv").Mutex.RLock() + defer c.Lock("srv").Mutex.RUnlock() + var out []*Service + for _, svc := range c.services.services { + if svc.OnStop != nil { + out = append(out, svc) + } + } + return Result{out, true} +} diff --git a/lock_test.go b/lock_test.go new file mode 100644 index 0000000..93b574a --- /dev/null +++ b/lock_test.go @@ -0,0 +1,55 @@ +package core_test + +import ( + "testing" + + . "dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +func TestLock_Good(t *testing.T) { + c := New().Value.(*Core) + lock := c.Lock("test") + assert.NotNil(t, lock) + assert.NotNil(t, lock.Mutex) +} + +func TestLock_SameName_Good(t *testing.T) { + c := New().Value.(*Core) + l1 := c.Lock("shared") + l2 := c.Lock("shared") + assert.Equal(t, l1, l2) +} + +func TestLock_DifferentName_Good(t *testing.T) { + c := New().Value.(*Core) + l1 := c.Lock("a") + l2 := c.Lock("b") + assert.NotEqual(t, l1, l2) +} + +func TestLockEnable_Good(t *testing.T) { + c := New().Value.(*Core) + c.Service("early", Service{}) + c.LockEnable() + c.LockApply() + + r := c.Service("late", Service{}) + assert.False(t, r.OK) +} + +func TestStartables_Good(t *testing.T) { + c := New().Value.(*Core) + c.Service("s", Service{OnStart: func() Result { return Result{OK: true} }}) + r := c.Startables() + assert.True(t, r.OK) + assert.Len(t, r.Value.([]*Service), 1) +} + +func TestStoppables_Good(t *testing.T) { + c := New().Value.(*Core) + c.Service("s", Service{OnStop: func() Result { return Result{OK: true} }}) + r := c.Stoppables() + assert.True(t, r.OK) + assert.Len(t, r.Value.([]*Service), 1) +} diff --git a/log.go b/log.go new file mode 100644 index 0000000..65f8c5f --- /dev/null +++ b/log.go @@ -0,0 +1,402 @@ +// Structured logging for the Core framework. 
// Level defines logging verbosity.
type Level int

// Logging level constants ordered by increasing verbosity.
const (
	// LevelQuiet suppresses all log output.
	LevelQuiet Level = iota
	// LevelError shows only error messages.
	LevelError
	// LevelWarn shows warnings and errors.
	LevelWarn
	// LevelInfo shows informational messages, warnings, and errors.
	LevelInfo
	// LevelDebug shows all messages including debug details.
	LevelDebug
)

// String returns the lowercase level name, or "unknown" for values outside
// the defined range.
func (l Level) String() string {
	names := [...]string{
		LevelQuiet: "quiet",
		LevelError: "error",
		LevelWarn:  "warn",
		LevelInfo:  "info",
		LevelDebug: "debug",
	}
	if l < 0 || int(l) >= len(names) {
		return "unknown"
	}
	return names[l]
}
// NewLog creates a new Log configured by opts.
//
// Output resolution order:
//  1. If opts.Rotation has a Filename and a RotationWriterFactory is
//     installed, logs go to the rotating writer (opts.Output is ignored).
//  2. Otherwise opts.Output is used.
//  3. If neither yields a writer, output defaults to os.Stderr.
//
// All Style* hooks start as the identity function; callers may override
// them after construction.
func NewLog(opts LogOptions) *Log {
	output := opts.Output
	if opts.Rotation != nil && opts.Rotation.Filename != "" && RotationWriterFactory != nil {
		// NOTE(review): the factory returns an io.WriteCloser but Log never
		// closes it — confirm the rotating writer tolerates being left open
		// for the process lifetime.
		output = RotationWriterFactory(*opts.Rotation)
	}
	if output == nil {
		output = os.Stderr
	}

	return &Log{
		level: opts.Level,
		output: output,
		// Clone so a caller mutating its RedactKeys slice later cannot
		// change redaction behavior behind the logger's back.
		redactKeys: slices.Clone(opts.RedactKeys),
		StyleTimestamp: identity,
		StyleDebug: identity,
		StyleInfo: identity,
		StyleWarn: identity,
		StyleError: identity,
		StyleSecurity: identity,
	}
}
// log renders a single log line to the configured output.
//
// It snapshots the mutable configuration (output, timestamp style, redaction
// keys) under the read lock, then formats outside the lock so a slow writer
// does not block configuration changes.
//
// Per-call pipeline:
//  1. Timestamp the entry (HH:MM:SS, passed through StyleTimestamp).
//  2. If any value in the caller's pairs is an error, auto-append "op" and
//     "stack" entries extracted via the Operation / FormatStackTrace
//     helpers — unless the caller already supplied those keys.
//  3. Format keyvals as key=value pairs, masking values whose key appears
//     in redactKeys and quoting string values to resist log injection.
//
// NOTE(review): the level parameter is currently unused — filtering happens
// in the callers via shouldLog; confirm whether it is reserved for future
// structured output or can be dropped.
// NOTE(review): StyleTimestamp is read under the lock here, but the other
// Style* fields are read without locking by Debug/Info/Warn/Error/Security;
// confirm they are only mutated before concurrent use.
func (l *Log) log(level Level, prefix, msg string, keyvals ...any) {
	l.mu.RLock()
	output := l.output
	styleTimestamp := l.StyleTimestamp
	redactKeys := l.redactKeys
	l.mu.RUnlock()

	timestamp := styleTimestamp(time.Now().Format("15:04:05"))

	// Copy keyvals so the appends below never mutate the caller's slice.
	keyvals = append([]any(nil), keyvals...)

	// Automatically extract context from any error value among the caller's
	// original pairs. Only the original length is scanned, so the "op" and
	// "stack" entries appended here are never re-inspected.
	origLen := len(keyvals)
	for i := 0; i < origLen; i += 2 {
		if i+1 < origLen {
			if err, ok := keyvals[i+1].(error); ok {
				if op := Operation(err); op != "" {
					// Skip if the caller already provided an "op" key.
					hasOp := false
					for j := 0; j < len(keyvals); j += 2 {
						if k, ok := keyvals[j].(string); ok && k == "op" {
							hasOp = true
							break
						}
					}
					if !hasOp {
						keyvals = append(keyvals, "op", op)
					}
				}
				if stack := FormatStackTrace(err); stack != "" {
					// Skip if the caller already provided a "stack" key.
					hasStack := false
					for j := 0; j < len(keyvals); j += 2 {
						if k, ok := keyvals[j].(string); ok && k == "stack" {
							hasStack = true
							break
						}
					}
					if !hasStack {
						keyvals = append(keyvals, "stack", stack)
					}
				}
			}
		}
	}

	// Format key-value pairs; an odd trailing key is formatted with a nil
	// value.
	var kvStr string
	if len(keyvals) > 0 {
		kvStr = " "
		for i := 0; i < len(keyvals); i += 2 {
			if i > 0 {
				kvStr += " "
			}
			key := keyvals[i]
			var val any
			if i+1 < len(keyvals) {
				val = keyvals[i+1]
			}

			// Mask values for keys registered via SetRedactKeys.
			keyStr := Sprint(key)
			if slices.Contains(redactKeys, keyStr) {
				val = "[REDACTED]"
			}

			// Quote string values so embedded newlines or control characters
			// cannot forge extra log lines (log-injection hardening).
			if s, ok := val.(string); ok {
				kvStr += Sprintf("%v=%q", key, s)
			} else {
				kvStr += Sprintf("%v=%v", key, val)
			}
		}
	}

	Print(output, "%s %s %s%s", timestamp, prefix, msg, kvStr)
}
// Username returns the current system username.
// It prefers os/user for reliability and falls back to the USER and
// USERNAME environment variables when user lookup fails.
func Username() string {
	u, err := user.Current()
	if err == nil {
		return u.Username
	}
	// Fallback for environments where user lookup might fail.
	for _, key := range []string{"USER", "USERNAME"} {
		if v := os.Getenv(key); v != "" {
			return v
		}
	}
	return ""
}
+func SetLevel(level Level) { + Default().SetLevel(level) +} + +// SetRedactKeys sets the default logger's redaction keys. +func SetRedactKeys(keys ...string) { + Default().SetRedactKeys(keys...) +} + +// Debug logs to the default logger. +func Debug(msg string, keyvals ...any) { + Default().Debug(msg, keyvals...) +} + +// Info logs to the default logger. +func Info(msg string, keyvals ...any) { + Default().Info(msg, keyvals...) +} + +// Warn logs to the default logger. +func Warn(msg string, keyvals ...any) { + Default().Warn(msg, keyvals...) +} + +// Error logs to the default logger. +func Error(msg string, keyvals ...any) { + Default().Error(msg, keyvals...) +} + +// Security logs to the default logger. +func Security(msg string, keyvals ...any) { + Default().Security(msg, keyvals...) +} + +// --- LogErr: Error-Aware Logger --- + +// LogErr logs structured information extracted from errors. +// Primary action: log. Secondary: extract error context. +type LogErr struct { + log *Log +} + +// NewLogErr creates a LogErr bound to the given logger. +func NewLogErr(log *Log) *LogErr { + return &LogErr{log: log} +} + +// Log extracts context from an Err and logs it at Error level. +func (le *LogErr) Log(err error) { + if err == nil { + return + } + le.log.Error(ErrorMessage(err), "op", Operation(err), "code", ErrorCode(err), "stack", FormatStackTrace(err)) +} + +// --- LogPanic: Panic-Aware Logger --- + +// LogPanic logs panic context without crash file management. +// Primary action: log. Secondary: recover panics. +type LogPanic struct { + log *Log +} + +// NewLogPanic creates a LogPanic bound to the given logger. +func NewLogPanic(log *Log) *LogPanic { + return &LogPanic{log: log} +} + +// Recover captures a panic and logs it. Does not write crash files. 
+// Use as: defer core.NewLogPanic(logger).Recover() +func (lp *LogPanic) Recover() { + r := recover() + if r == nil { + return + } + err, ok := r.(error) + if !ok { + err = NewError(Sprint("panic: ", r)) + } + lp.log.Error("panic recovered", + "err", err, + "op", Operation(err), + "stack", FormatStackTrace(err), + ) +} diff --git a/log_test.go b/log_test.go new file mode 100644 index 0000000..60b6f6c --- /dev/null +++ b/log_test.go @@ -0,0 +1,165 @@ +package core_test + +import ( + "os" + "testing" + + . "dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- Log --- + +func TestLog_New_Good(t *testing.T) { + l := NewLog(LogOptions{Level: LevelInfo}) + assert.NotNil(t, l) +} + +func TestLog_AllLevels_Good(t *testing.T) { + l := NewLog(LogOptions{Level: LevelDebug}) + l.Debug("debug") + l.Info("info") + l.Warn("warn") + l.Error("error") + l.Security("security event") +} + +func TestLog_LevelFiltering_Good(t *testing.T) { + // At Error level, Debug/Info/Warn should be suppressed (no panic) + l := NewLog(LogOptions{Level: LevelError}) + l.Debug("suppressed") + l.Info("suppressed") + l.Warn("suppressed") + l.Error("visible") +} + +func TestLog_SetLevel_Good(t *testing.T) { + l := NewLog(LogOptions{Level: LevelInfo}) + l.SetLevel(LevelDebug) + assert.Equal(t, LevelDebug, l.Level()) +} + +func TestLog_SetRedactKeys_Good(t *testing.T) { + l := NewLog(LogOptions{Level: LevelInfo}) + l.SetRedactKeys("password", "token") + // Redacted keys should mask values in output + l.Info("login", "password", "secret123", "user", "admin") +} + +func TestLog_LevelString_Good(t *testing.T) { + assert.Equal(t, "debug", LevelDebug.String()) + assert.Equal(t, "info", LevelInfo.String()) + assert.Equal(t, "warn", LevelWarn.String()) + assert.Equal(t, "error", LevelError.String()) +} + +func TestLog_CoreLog_Good(t *testing.T) { + c := New().Value.(*Core) + assert.NotNil(t, c.Log()) +} + +func TestLog_ErrorSink_Good(t *testing.T) { + l := NewLog(LogOptions{Level: LevelInfo}) + 
var sink ErrorSink = l + sink.Error("test") + sink.Warn("test") +} + +// --- Default Logger --- + +func TestLog_Default_Good(t *testing.T) { + d := Default() + assert.NotNil(t, d) +} + +func TestLog_SetDefault_Good(t *testing.T) { + original := Default() + defer SetDefault(original) + + custom := NewLog(LogOptions{Level: LevelDebug}) + SetDefault(custom) + assert.Equal(t, custom, Default()) +} + +func TestLog_PackageLevelFunctions_Good(t *testing.T) { + // Package-level log functions use the default logger + Debug("debug msg") + Info("info msg") + Warn("warn msg") + Error("error msg") + Security("security msg") +} + +func TestLog_PackageSetLevel_Good(t *testing.T) { + original := Default() + defer SetDefault(original) + + SetLevel(LevelDebug) + SetRedactKeys("secret") +} + +func TestLog_Username_Good(t *testing.T) { + u := Username() + assert.NotEmpty(t, u) +} + +// --- LogErr --- + +func TestLogErr_Good(t *testing.T) { + l := NewLog(LogOptions{Level: LevelInfo}) + le := NewLogErr(l) + assert.NotNil(t, le) + + err := E("test.Operation", "something broke", nil) + le.Log(err) +} + +func TestLogErr_Nil_Good(t *testing.T) { + l := NewLog(LogOptions{Level: LevelInfo}) + le := NewLogErr(l) + le.Log(nil) // should not panic +} + +// --- LogPanic --- + +func TestLogPanic_Good(t *testing.T) { + l := NewLog(LogOptions{Level: LevelInfo}) + lp := NewLogPanic(l) + assert.NotNil(t, lp) +} + +func TestLogPanic_Recover_Good(t *testing.T) { + l := NewLog(LogOptions{Level: LevelInfo}) + lp := NewLogPanic(l) + assert.NotPanics(t, func() { + defer lp.Recover() + panic("caught") + }) +} + +// --- SetOutput --- + +func TestLog_SetOutput_Good(t *testing.T) { + l := NewLog(LogOptions{Level: LevelInfo}) + l.SetOutput(os.Stderr) + l.Info("redirected") +} + +// --- Log suppression by level --- + +func TestLog_Quiet_Suppresses_Ugly(t *testing.T) { + l := NewLog(LogOptions{Level: LevelQuiet}) + // These should not panic even though nothing is logged + l.Debug("suppressed") + 
l.Info("suppressed") + l.Warn("suppressed") + l.Error("suppressed") +} + +func TestLog_ErrorLevel_Suppresses_Ugly(t *testing.T) { + l := NewLog(LogOptions{Level: LevelError}) + l.Debug("suppressed") // below threshold + l.Info("suppressed") // below threshold + l.Warn("suppressed") // below threshold + l.Error("visible") // at threshold +} diff --git a/main.go b/main.go deleted file mode 100644 index 1a85275..0000000 --- a/main.go +++ /dev/null @@ -1,13 +0,0 @@ -package main - -import ( - "github.com/host-uk/core/pkg/cli" - - // Build variants import commands via self-registration. - // See internal/variants/ for available variants: full, ci, php, minimal. - _ "github.com/host-uk/core/internal/variants" -) - -func main() { - cli.Main() -} diff --git a/mkdocs.yml b/mkdocs.yml deleted file mode 100644 index cd4107c..0000000 --- a/mkdocs.yml +++ /dev/null @@ -1,73 +0,0 @@ -site_name: Core Framework -site_url: https://core.help -site_description: 'A Web3 Framework for building Go desktop applications with Wails v3' -site_author: 'Snider' -repo_url: 'https://github.com/Snider/Core' -repo_name: 'Snider/Core' - -theme: - name: material - palette: - - scheme: default - primary: deep purple - accent: purple - toggle: - icon: material/brightness-7 - name: Switch to dark mode - - scheme: slate - primary: deep purple - accent: purple - toggle: - icon: material/brightness-4 - name: Switch to light mode - features: - - navigation.tabs - - navigation.sections - - navigation.expand - - navigation.top - - search.suggest - - search.highlight - - content.tabs.link - - content.code.copy - -markdown_extensions: - - pymdownx.highlight: - anchor_linenums: true - - pymdownx.superfences - - pymdownx.tabbed: - alternate_style: true - - admonition - - pymdownx.details - - attr_list - - md_in_html - -nav: - - Home: index.md - - Getting Started: - - Installation: getting-started/installation.md - - Quick Start: getting-started/quickstart.md - - Architecture: getting-started/architecture.md - - 
// Result is the universal return type for Core operations.
// It replaces the (value, error) pattern — errors flow through Core
// internally, carried in Value with OK set to false.
//
//	r := c.Data().New(core.Options{{Key: "name", Value: "brain"}})
//	if r.OK { use(r.Result()) }
type Result struct {
	Value any
	OK    bool
}

// Result gets or sets the value. With no arguments it returns the receiver.
// With one argument it wraps that value as a success. With two or more it
// maps a Go (value, error) pair onto Result: a non-nil trailing error yields
// a failed Result carrying the error; otherwise the first argument is
// wrapped as a success.
//
//	r.Result(file, err) // OK = err == nil; Value = file on success, err on failure
//	r.Result(value)     // OK = true, Value = value
//	r.Result()          // returns the receiver unchanged
func (r Result) Result(args ...any) Result {
	switch len(args) {
	case 0:
		return r
	case 1:
		return Result{Value: args[0], OK: true}
	}
	if err, isErr := args[len(args)-1].(error); isErr && err != nil {
		return Result{Value: err, OK: false}
	}
	return Result{Value: args[0], OK: true}
}
+// +// debug := opts.Bool("debug") +func (o Options) Bool(key string) bool { + r := o.Get(key) + if !r.OK { + return false + } + b, _ := r.Value.(bool) + return b +} diff --git a/options_test.go b/options_test.go new file mode 100644 index 0000000..4556062 --- /dev/null +++ b/options_test.go @@ -0,0 +1,94 @@ +package core_test + +import ( + "testing" + + . "dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- Option / Options --- + +func TestOptions_Get_Good(t *testing.T) { + opts := Options{ + {Key: "name", Value: "brain"}, + {Key: "port", Value: 8080}, + } + r := opts.Get("name") + assert.True(t, r.OK) + assert.Equal(t, "brain", r.Value) +} + +func TestOptions_Get_Bad(t *testing.T) { + opts := Options{{Key: "name", Value: "brain"}} + r := opts.Get("missing") + assert.False(t, r.OK) + assert.Nil(t, r.Value) +} + +func TestOptions_Has_Good(t *testing.T) { + opts := Options{{Key: "debug", Value: true}} + assert.True(t, opts.Has("debug")) + assert.False(t, opts.Has("missing")) +} + +func TestOptions_String_Good(t *testing.T) { + opts := Options{{Key: "name", Value: "brain"}} + assert.Equal(t, "brain", opts.String("name")) +} + +func TestOptions_String_Bad(t *testing.T) { + opts := Options{{Key: "port", Value: 8080}} + // Wrong type — returns empty string + assert.Equal(t, "", opts.String("port")) + // Missing key — returns empty string + assert.Equal(t, "", opts.String("missing")) +} + +func TestOptions_Int_Good(t *testing.T) { + opts := Options{{Key: "port", Value: 8080}} + assert.Equal(t, 8080, opts.Int("port")) +} + +func TestOptions_Int_Bad(t *testing.T) { + opts := Options{{Key: "name", Value: "brain"}} + assert.Equal(t, 0, opts.Int("name")) + assert.Equal(t, 0, opts.Int("missing")) +} + +func TestOptions_Bool_Good(t *testing.T) { + opts := Options{{Key: "debug", Value: true}} + assert.True(t, opts.Bool("debug")) +} + +func TestOptions_Bool_Bad(t *testing.T) { + opts := Options{{Key: "name", Value: "brain"}} + assert.False(t, 
opts.Bool("name")) + assert.False(t, opts.Bool("missing")) +} + +func TestOptions_TypedStruct_Good(t *testing.T) { + // Packages plug typed structs into Option.Value + type BrainConfig struct { + Name string + OllamaURL string + Collection string + } + cfg := BrainConfig{Name: "brain", OllamaURL: "http://localhost:11434", Collection: "openbrain"} + opts := Options{{Key: "config", Value: cfg}} + + r := opts.Get("config") + assert.True(t, r.OK) + bc, ok := r.Value.(BrainConfig) + assert.True(t, ok) + assert.Equal(t, "brain", bc.Name) + assert.Equal(t, "http://localhost:11434", bc.OllamaURL) +} + +func TestOptions_Empty_Good(t *testing.T) { + opts := Options{} + assert.False(t, opts.Has("anything")) + assert.Equal(t, "", opts.String("anything")) + assert.Equal(t, 0, opts.Int("anything")) + assert.False(t, opts.Bool("anything")) +} diff --git a/path.go b/path.go new file mode 100644 index 0000000..d977e9c --- /dev/null +++ b/path.go @@ -0,0 +1,174 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// OS-aware filesystem path operations for the Core framework. +// Uses Env("DS") for the separator and Core string primitives +// for path manipulation. filepath imported only for PathGlob. +// +// Path anchors relative segments to DIR_HOME: +// +// core.Path("Code", ".core") // "/Users/snider/Code/.core" +// core.Path("/tmp", "workspace") // "/tmp/workspace" +// core.Path() // "/Users/snider" +// +// Path component helpers: +// +// core.PathBase("/Users/snider/Code/core") // "core" +// core.PathDir("/Users/snider/Code/core") // "/Users/snider/Code" +// core.PathExt("main.go") // ".go" +package core + +import "path/filepath" + +// Path builds a clean, absolute filesystem path from segments. +// Uses Env("DS") for the OS directory separator. +// Relative paths are anchored to DIR_HOME. Absolute paths pass through. 
// PathIsAbs reports whether the path is absolute.
// Handles Unix paths (leading /) and Windows drive-letter paths
// (like C:\ or C:/). Only alphabetic drive letters are accepted, so
// malformed inputs such as "1:/x" or "::/y" are treated as relative —
// previously any leading byte was accepted, which made Path() skip
// anchoring such inputs to DIR_HOME.
//
//	core.PathIsAbs("/tmp")     // true
//	core.PathIsAbs("C:\\tmp")  // true
//	core.PathIsAbs("relative") // false
func PathIsAbs(p string) bool {
	if p == "" {
		return false
	}
	if p[0] == '/' {
		return true
	}
	// Windows: a drive letter followed by ":" and a slash or backslash.
	if len(p) >= 3 && p[1] == ':' && (p[2] == '/' || p[2] == '\\') {
		c := p[0]
		if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') {
			return true
		}
	}
	return false
}
// lastIndex returns the index of the last occurrence of substr in s, or -1.
// An empty s or substr yields -1 (unlike strings.LastIndex, which returns
// len(s) for an empty substr).
func lastIndex(s, substr string) int {
	if s == "" || substr == "" {
		return -1
	}
	found := -1
	for i := 0; i+len(substr) <= len(s); i++ {
		if s[i:i+len(substr)] == substr {
			found = i
		}
	}
	return found
}
+ +func TestPath_Cleans(t *testing.T) { + home, err := os.UserHomeDir() + require.NoError(t, err) + assert.Equal(t, home+core.Env("DS")+"Code", core.Path("Code", "sub", "..")) +} + +func TestPath_CleanDoubleSlash(t *testing.T) { + ds := core.Env("DS") + assert.Equal(t, ds+"tmp"+ds+"file", core.Path("/tmp//file")) +} + +func TestPathBase(t *testing.T) { + assert.Equal(t, "core", core.PathBase("/Users/snider/Code/core")) + assert.Equal(t, "homelab", core.PathBase("deploy/to/homelab")) +} + +func TestPathBase_Root(t *testing.T) { + assert.Equal(t, "/", core.PathBase("/")) +} + +func TestPathBase_Empty(t *testing.T) { + assert.Equal(t, ".", core.PathBase("")) +} + +func TestPathDir(t *testing.T) { + assert.Equal(t, "/Users/snider/Code", core.PathDir("/Users/snider/Code/core")) +} + +func TestPathDir_Root(t *testing.T) { + assert.Equal(t, "/", core.PathDir("/file")) +} + +func TestPathDir_NoDir(t *testing.T) { + assert.Equal(t, ".", core.PathDir("file.go")) +} + +func TestPathExt(t *testing.T) { + assert.Equal(t, ".go", core.PathExt("main.go")) + assert.Equal(t, "", core.PathExt("Makefile")) + assert.Equal(t, ".gz", core.PathExt("archive.tar.gz")) +} + +func TestPath_EnvConsistency(t *testing.T) { + assert.Equal(t, core.Env("DIR_HOME"), core.Path()) +} + +func TestPathGlob_Good(t *testing.T) { + dir := t.TempDir() + os.WriteFile(filepath.Join(dir, "a.txt"), []byte("a"), 0644) + os.WriteFile(filepath.Join(dir, "b.txt"), []byte("b"), 0644) + os.WriteFile(filepath.Join(dir, "c.log"), []byte("c"), 0644) + + matches := core.PathGlob(filepath.Join(dir, "*.txt")) + assert.Len(t, matches, 2) +} + +func TestPathGlob_NoMatch(t *testing.T) { + matches := core.PathGlob("/nonexistent/pattern-*.xyz") + assert.Empty(t, matches) +} + +func TestPathIsAbs_Good(t *testing.T) { + assert.True(t, core.PathIsAbs("/tmp")) + assert.True(t, core.PathIsAbs("/")) + assert.False(t, core.PathIsAbs("relative")) + assert.False(t, core.PathIsAbs("")) +} + +func TestCleanPath_Good(t *testing.T) { + 
assert.Equal(t, "/a/b", core.CleanPath("/a//b", "/")) + assert.Equal(t, "/a/c", core.CleanPath("/a/b/../c", "/")) + assert.Equal(t, "/", core.CleanPath("/", "/")) + assert.Equal(t, ".", core.CleanPath("", "/")) +} + +func TestPathDir_TrailingSlash(t *testing.T) { + result := core.PathDir("/Users/snider/Code/") + assert.Equal(t, "/Users/snider/Code", result) +} diff --git a/pkg/agentic/client.go b/pkg/agentic/client.go deleted file mode 100644 index c2213ca..0000000 --- a/pkg/agentic/client.go +++ /dev/null @@ -1,328 +0,0 @@ -package agentic - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "io" - "net/http" - "net/url" - "strconv" - "strings" - "time" - - "github.com/host-uk/core/pkg/errors" -) - -// Client is the API client for the core-agentic service. -type Client struct { - // BaseURL is the base URL of the API server. - BaseURL string - // Token is the authentication token. - Token string - // HTTPClient is the HTTP client used for requests. - HTTPClient *http.Client - // AgentID is the identifier for this agent when claiming tasks. - AgentID string -} - -// NewClient creates a new agentic API client with the given base URL and token. -func NewClient(baseURL, token string) *Client { - return &Client{ - BaseURL: strings.TrimSuffix(baseURL, "/"), - Token: token, - HTTPClient: &http.Client{ - Timeout: 30 * time.Second, - }, - } -} - -// NewClientFromConfig creates a new client from a Config struct. -func NewClientFromConfig(cfg *Config) *Client { - client := NewClient(cfg.BaseURL, cfg.Token) - client.AgentID = cfg.AgentID - return client -} - -// ListTasks retrieves a list of tasks matching the given options. 
-func (c *Client) ListTasks(ctx context.Context, opts ListOptions) ([]Task, error) { - const op = "agentic.Client.ListTasks" - - // Build query parameters - params := url.Values{} - if opts.Status != "" { - params.Set("status", string(opts.Status)) - } - if opts.Priority != "" { - params.Set("priority", string(opts.Priority)) - } - if opts.Project != "" { - params.Set("project", opts.Project) - } - if opts.ClaimedBy != "" { - params.Set("claimed_by", opts.ClaimedBy) - } - if opts.Limit > 0 { - params.Set("limit", strconv.Itoa(opts.Limit)) - } - if len(opts.Labels) > 0 { - params.Set("labels", strings.Join(opts.Labels, ",")) - } - - endpoint := c.BaseURL + "/api/tasks" - if len(params) > 0 { - endpoint += "?" + params.Encode() - } - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint, nil) - if err != nil { - return nil, errors.E(op, "failed to create request", err) - } - - c.setHeaders(req) - - resp, err := c.HTTPClient.Do(req) - if err != nil { - return nil, errors.E(op, "request failed", err) - } - defer resp.Body.Close() - - if err := c.checkResponse(resp); err != nil { - return nil, errors.E(op, "API error", err) - } - - var tasks []Task - if err := json.NewDecoder(resp.Body).Decode(&tasks); err != nil { - return nil, errors.E(op, "failed to decode response", err) - } - - return tasks, nil -} - -// GetTask retrieves a single task by its ID. 
-func (c *Client) GetTask(ctx context.Context, id string) (*Task, error) { - const op = "agentic.Client.GetTask" - - if id == "" { - return nil, errors.E(op, "task ID is required", nil) - } - - endpoint := fmt.Sprintf("%s/api/tasks/%s", c.BaseURL, url.PathEscape(id)) - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint, nil) - if err != nil { - return nil, errors.E(op, "failed to create request", err) - } - - c.setHeaders(req) - - resp, err := c.HTTPClient.Do(req) - if err != nil { - return nil, errors.E(op, "request failed", err) - } - defer resp.Body.Close() - - if err := c.checkResponse(resp); err != nil { - return nil, errors.E(op, "API error", err) - } - - var task Task - if err := json.NewDecoder(resp.Body).Decode(&task); err != nil { - return nil, errors.E(op, "failed to decode response", err) - } - - return &task, nil -} - -// ClaimTask claims a task for the current agent. -func (c *Client) ClaimTask(ctx context.Context, id string) (*Task, error) { - const op = "agentic.Client.ClaimTask" - - if id == "" { - return nil, errors.E(op, "task ID is required", nil) - } - - endpoint := fmt.Sprintf("%s/api/tasks/%s/claim", c.BaseURL, url.PathEscape(id)) - - // Include agent ID in the claim request if available - var body io.Reader - if c.AgentID != "" { - data, _ := json.Marshal(map[string]string{"agent_id": c.AgentID}) - body = bytes.NewReader(data) - } - - req, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, body) - if err != nil { - return nil, errors.E(op, "failed to create request", err) - } - - c.setHeaders(req) - if body != nil { - req.Header.Set("Content-Type", "application/json") - } - - resp, err := c.HTTPClient.Do(req) - if err != nil { - return nil, errors.E(op, "request failed", err) - } - defer resp.Body.Close() - - if err := c.checkResponse(resp); err != nil { - return nil, errors.E(op, "API error", err) - } - - // Read body once to allow multiple decode attempts - bodyData, err := io.ReadAll(resp.Body) - if err 
!= nil { - return nil, errors.E(op, "failed to read response", err) - } - - // Try decoding as ClaimResponse first - var result ClaimResponse - if err := json.Unmarshal(bodyData, &result); err == nil && result.Task != nil { - return result.Task, nil - } - - // Try decoding as just a Task for simpler API responses - var task Task - if err := json.Unmarshal(bodyData, &task); err != nil { - return nil, errors.E(op, "failed to decode response", err) - } - - return &task, nil -} - -// UpdateTask updates a task with new status, progress, or notes. -func (c *Client) UpdateTask(ctx context.Context, id string, update TaskUpdate) error { - const op = "agentic.Client.UpdateTask" - - if id == "" { - return errors.E(op, "task ID is required", nil) - } - - endpoint := fmt.Sprintf("%s/api/tasks/%s", c.BaseURL, url.PathEscape(id)) - - data, err := json.Marshal(update) - if err != nil { - return errors.E(op, "failed to marshal update", err) - } - - req, err := http.NewRequestWithContext(ctx, http.MethodPatch, endpoint, bytes.NewReader(data)) - if err != nil { - return errors.E(op, "failed to create request", err) - } - - c.setHeaders(req) - req.Header.Set("Content-Type", "application/json") - - resp, err := c.HTTPClient.Do(req) - if err != nil { - return errors.E(op, "request failed", err) - } - defer resp.Body.Close() - - if err := c.checkResponse(resp); err != nil { - return errors.E(op, "API error", err) - } - - return nil -} - -// CompleteTask marks a task as completed with the given result. 
-func (c *Client) CompleteTask(ctx context.Context, id string, result TaskResult) error { - const op = "agentic.Client.CompleteTask" - - if id == "" { - return errors.E(op, "task ID is required", nil) - } - - endpoint := fmt.Sprintf("%s/api/tasks/%s/complete", c.BaseURL, url.PathEscape(id)) - - data, err := json.Marshal(result) - if err != nil { - return errors.E(op, "failed to marshal result", err) - } - - req, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, bytes.NewReader(data)) - if err != nil { - return errors.E(op, "failed to create request", err) - } - - c.setHeaders(req) - req.Header.Set("Content-Type", "application/json") - - resp, err := c.HTTPClient.Do(req) - if err != nil { - return errors.E(op, "request failed", err) - } - defer resp.Body.Close() - - if err := c.checkResponse(resp); err != nil { - return errors.E(op, "API error", err) - } - - return nil -} - -// setHeaders adds common headers to the request. -func (c *Client) setHeaders(req *http.Request) { - req.Header.Set("Authorization", "Bearer "+c.Token) - req.Header.Set("Accept", "application/json") - req.Header.Set("User-Agent", "core-agentic-client/1.0") -} - -// checkResponse checks if the response indicates an error. -func (c *Client) checkResponse(resp *http.Response) error { - if resp.StatusCode >= 200 && resp.StatusCode < 300 { - return nil - } - - body, _ := io.ReadAll(resp.Body) - - // Try to parse as APIError - var apiErr APIError - if err := json.Unmarshal(body, &apiErr); err == nil && apiErr.Message != "" { - apiErr.Code = resp.StatusCode - return &apiErr - } - - // Return generic error - return &APIError{ - Code: resp.StatusCode, - Message: fmt.Sprintf("HTTP %d: %s", resp.StatusCode, http.StatusText(resp.StatusCode)), - Details: string(body), - } -} - -// mustReadAll reads all bytes from a reader, returning empty slice on error. -func mustReadAll(r io.Reader) []byte { - data, _ := io.ReadAll(r) - return data -} - -// Ping tests the connection to the API server. 
-func (c *Client) Ping(ctx context.Context) error { - const op = "agentic.Client.Ping" - - endpoint := c.BaseURL + "/api/health" - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint, nil) - if err != nil { - return errors.E(op, "failed to create request", err) - } - - c.setHeaders(req) - - resp, err := c.HTTPClient.Do(req) - if err != nil { - return errors.E(op, "request failed", err) - } - defer resp.Body.Close() - - if resp.StatusCode >= 400 { - return errors.E(op, fmt.Sprintf("server returned status %d", resp.StatusCode), nil) - } - - return nil -} diff --git a/pkg/agentic/client_test.go b/pkg/agentic/client_test.go deleted file mode 100644 index 89ff93d..0000000 --- a/pkg/agentic/client_test.go +++ /dev/null @@ -1,356 +0,0 @@ -package agentic - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// Test fixtures -var testTask = Task{ - ID: "task-123", - Title: "Implement feature X", - Description: "Add the new feature X to the system", - Priority: PriorityHigh, - Status: StatusPending, - Labels: []string{"feature", "backend"}, - Files: []string{"pkg/feature/feature.go"}, - CreatedAt: time.Now().Add(-24 * time.Hour), - Project: "core", -} - -var testTasks = []Task{ - testTask, - { - ID: "task-456", - Title: "Fix bug Y", - Description: "Fix the bug in component Y", - Priority: PriorityCritical, - Status: StatusPending, - Labels: []string{"bug", "urgent"}, - CreatedAt: time.Now().Add(-2 * time.Hour), - Project: "core", - }, -} - -func TestNewClient_Good(t *testing.T) { - client := NewClient("https://api.example.com", "test-token") - - assert.Equal(t, "https://api.example.com", client.BaseURL) - assert.Equal(t, "test-token", client.Token) - assert.NotNil(t, client.HTTPClient) -} - -func TestNewClient_Good_TrailingSlash(t *testing.T) { - client := NewClient("https://api.example.com/", "test-token") - - assert.Equal(t, 
"https://api.example.com", client.BaseURL) -} - -func TestNewClientFromConfig_Good(t *testing.T) { - cfg := &Config{ - BaseURL: "https://api.example.com", - Token: "config-token", - AgentID: "agent-001", - } - - client := NewClientFromConfig(cfg) - - assert.Equal(t, "https://api.example.com", client.BaseURL) - assert.Equal(t, "config-token", client.Token) - assert.Equal(t, "agent-001", client.AgentID) -} - -func TestClient_ListTasks_Good(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, http.MethodGet, r.Method) - assert.Equal(t, "/api/tasks", r.URL.Path) - assert.Equal(t, "Bearer test-token", r.Header.Get("Authorization")) - - w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(testTasks) - })) - defer server.Close() - - client := NewClient(server.URL, "test-token") - tasks, err := client.ListTasks(context.Background(), ListOptions{}) - - require.NoError(t, err) - assert.Len(t, tasks, 2) - assert.Equal(t, "task-123", tasks[0].ID) - assert.Equal(t, "task-456", tasks[1].ID) -} - -func TestClient_ListTasks_Good_WithFilters(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - query := r.URL.Query() - assert.Equal(t, "pending", query.Get("status")) - assert.Equal(t, "high", query.Get("priority")) - assert.Equal(t, "core", query.Get("project")) - assert.Equal(t, "10", query.Get("limit")) - assert.Equal(t, "bug,urgent", query.Get("labels")) - - w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode([]Task{testTask}) - })) - defer server.Close() - - client := NewClient(server.URL, "test-token") - opts := ListOptions{ - Status: StatusPending, - Priority: PriorityHigh, - Project: "core", - Limit: 10, - Labels: []string{"bug", "urgent"}, - } - - tasks, err := client.ListTasks(context.Background(), opts) - - require.NoError(t, err) - assert.Len(t, tasks, 1) -} - -func 
TestClient_ListTasks_Bad_ServerError(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusInternalServerError) - json.NewEncoder(w).Encode(APIError{Message: "internal error"}) - })) - defer server.Close() - - client := NewClient(server.URL, "test-token") - tasks, err := client.ListTasks(context.Background(), ListOptions{}) - - assert.Error(t, err) - assert.Nil(t, tasks) - assert.Contains(t, err.Error(), "internal error") -} - -func TestClient_GetTask_Good(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, http.MethodGet, r.Method) - assert.Equal(t, "/api/tasks/task-123", r.URL.Path) - - w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(testTask) - })) - defer server.Close() - - client := NewClient(server.URL, "test-token") - task, err := client.GetTask(context.Background(), "task-123") - - require.NoError(t, err) - assert.Equal(t, "task-123", task.ID) - assert.Equal(t, "Implement feature X", task.Title) - assert.Equal(t, PriorityHigh, task.Priority) -} - -func TestClient_GetTask_Bad_EmptyID(t *testing.T) { - client := NewClient("https://api.example.com", "test-token") - task, err := client.GetTask(context.Background(), "") - - assert.Error(t, err) - assert.Nil(t, task) - assert.Contains(t, err.Error(), "task ID is required") -} - -func TestClient_GetTask_Bad_NotFound(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusNotFound) - json.NewEncoder(w).Encode(APIError{Message: "task not found"}) - })) - defer server.Close() - - client := NewClient(server.URL, "test-token") - task, err := client.GetTask(context.Background(), "nonexistent") - - assert.Error(t, err) - assert.Nil(t, task) - assert.Contains(t, err.Error(), "task not found") -} - -func TestClient_ClaimTask_Good(t *testing.T) { - 
claimedTask := testTask - claimedTask.Status = StatusInProgress - claimedTask.ClaimedBy = "agent-001" - - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, http.MethodPost, r.Method) - assert.Equal(t, "/api/tasks/task-123/claim", r.URL.Path) - - w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(ClaimResponse{Task: &claimedTask}) - })) - defer server.Close() - - client := NewClient(server.URL, "test-token") - client.AgentID = "agent-001" - task, err := client.ClaimTask(context.Background(), "task-123") - - require.NoError(t, err) - assert.Equal(t, StatusInProgress, task.Status) - assert.Equal(t, "agent-001", task.ClaimedBy) -} - -func TestClient_ClaimTask_Good_SimpleResponse(t *testing.T) { - // Some APIs might return just the task without wrapping - claimedTask := testTask - claimedTask.Status = StatusInProgress - - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(claimedTask) - })) - defer server.Close() - - client := NewClient(server.URL, "test-token") - task, err := client.ClaimTask(context.Background(), "task-123") - - require.NoError(t, err) - assert.Equal(t, "task-123", task.ID) -} - -func TestClient_ClaimTask_Bad_EmptyID(t *testing.T) { - client := NewClient("https://api.example.com", "test-token") - task, err := client.ClaimTask(context.Background(), "") - - assert.Error(t, err) - assert.Nil(t, task) - assert.Contains(t, err.Error(), "task ID is required") -} - -func TestClient_ClaimTask_Bad_AlreadyClaimed(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusConflict) - json.NewEncoder(w).Encode(APIError{Message: "task already claimed"}) - })) - defer server.Close() - - client := NewClient(server.URL, "test-token") - task, err := 
client.ClaimTask(context.Background(), "task-123") - - assert.Error(t, err) - assert.Nil(t, task) - assert.Contains(t, err.Error(), "task already claimed") -} - -func TestClient_UpdateTask_Good(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, http.MethodPatch, r.Method) - assert.Equal(t, "/api/tasks/task-123", r.URL.Path) - assert.Equal(t, "application/json", r.Header.Get("Content-Type")) - - var update TaskUpdate - err := json.NewDecoder(r.Body).Decode(&update) - require.NoError(t, err) - assert.Equal(t, StatusInProgress, update.Status) - assert.Equal(t, 50, update.Progress) - - w.WriteHeader(http.StatusOK) - })) - defer server.Close() - - client := NewClient(server.URL, "test-token") - err := client.UpdateTask(context.Background(), "task-123", TaskUpdate{ - Status: StatusInProgress, - Progress: 50, - Notes: "Making progress", - }) - - assert.NoError(t, err) -} - -func TestClient_UpdateTask_Bad_EmptyID(t *testing.T) { - client := NewClient("https://api.example.com", "test-token") - err := client.UpdateTask(context.Background(), "", TaskUpdate{}) - - assert.Error(t, err) - assert.Contains(t, err.Error(), "task ID is required") -} - -func TestClient_CompleteTask_Good(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, http.MethodPost, r.Method) - assert.Equal(t, "/api/tasks/task-123/complete", r.URL.Path) - - var result TaskResult - err := json.NewDecoder(r.Body).Decode(&result) - require.NoError(t, err) - assert.True(t, result.Success) - assert.Equal(t, "Feature implemented", result.Output) - - w.WriteHeader(http.StatusOK) - })) - defer server.Close() - - client := NewClient(server.URL, "test-token") - err := client.CompleteTask(context.Background(), "task-123", TaskResult{ - Success: true, - Output: "Feature implemented", - Artifacts: []string{"pkg/feature/feature.go"}, - }) - - assert.NoError(t, err) -} - 
-func TestClient_CompleteTask_Bad_EmptyID(t *testing.T) { - client := NewClient("https://api.example.com", "test-token") - err := client.CompleteTask(context.Background(), "", TaskResult{}) - - assert.Error(t, err) - assert.Contains(t, err.Error(), "task ID is required") -} - -func TestClient_Ping_Good(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, "/api/health", r.URL.Path) - w.WriteHeader(http.StatusOK) - })) - defer server.Close() - - client := NewClient(server.URL, "test-token") - err := client.Ping(context.Background()) - - assert.NoError(t, err) -} - -func TestClient_Ping_Bad_ServerDown(t *testing.T) { - client := NewClient("http://localhost:99999", "test-token") - client.HTTPClient.Timeout = 100 * time.Millisecond - - err := client.Ping(context.Background()) - - assert.Error(t, err) - assert.Contains(t, err.Error(), "request failed") -} - -func TestAPIError_Error_Good(t *testing.T) { - err := &APIError{ - Code: 404, - Message: "task not found", - } - - assert.Equal(t, "task not found", err.Error()) - - err.Details = "task-123 does not exist" - assert.Equal(t, "task not found: task-123 does not exist", err.Error()) -} - -func TestTaskStatus_Good(t *testing.T) { - assert.Equal(t, TaskStatus("pending"), StatusPending) - assert.Equal(t, TaskStatus("in_progress"), StatusInProgress) - assert.Equal(t, TaskStatus("completed"), StatusCompleted) - assert.Equal(t, TaskStatus("blocked"), StatusBlocked) -} - -func TestTaskPriority_Good(t *testing.T) { - assert.Equal(t, TaskPriority("critical"), PriorityCritical) - assert.Equal(t, TaskPriority("high"), PriorityHigh) - assert.Equal(t, TaskPriority("medium"), PriorityMedium) - assert.Equal(t, TaskPriority("low"), PriorityLow) -} diff --git a/pkg/agentic/completion.go b/pkg/agentic/completion.go deleted file mode 100644 index 3107c87..0000000 --- a/pkg/agentic/completion.go +++ /dev/null @@ -1,338 +0,0 @@ -// Package agentic provides AI 
collaboration features for task management. -package agentic - -import ( - "bytes" - "context" - "fmt" - "os/exec" - "strings" - - "github.com/host-uk/core/pkg/errors" -) - -// PROptions contains options for creating a pull request. -type PROptions struct { - // Title is the PR title. - Title string `json:"title"` - // Body is the PR description. - Body string `json:"body"` - // Draft marks the PR as a draft. - Draft bool `json:"draft"` - // Labels are labels to add to the PR. - Labels []string `json:"labels"` - // Base is the base branch (defaults to main). - Base string `json:"base"` -} - -// AutoCommit creates a git commit with a task reference. -// The commit message follows the format: -// -// feat(scope): description -// -// Task: #123 -// Co-Authored-By: Claude -func AutoCommit(ctx context.Context, task *Task, dir string, message string) error { - const op = "agentic.AutoCommit" - - if task == nil { - return errors.E(op, "task is required", nil) - } - - if message == "" { - return errors.E(op, "commit message is required", nil) - } - - // Build full commit message - fullMessage := buildCommitMessage(task, message) - - // Stage all changes - if _, err := runGitCommandCtx(ctx, dir, "add", "-A"); err != nil { - return errors.E(op, "failed to stage changes", err) - } - - // Create commit - if _, err := runGitCommandCtx(ctx, dir, "commit", "-m", fullMessage); err != nil { - return errors.E(op, "failed to create commit", err) - } - - return nil -} - -// buildCommitMessage formats a commit message with task reference. -func buildCommitMessage(task *Task, message string) string { - var sb strings.Builder - - // Write the main message - sb.WriteString(message) - sb.WriteString("\n\n") - - // Add task reference - sb.WriteString("Task: #") - sb.WriteString(task.ID) - sb.WriteString("\n") - - // Add co-author - sb.WriteString("Co-Authored-By: Claude \n") - - return sb.String() -} - -// CreatePR creates a pull request using the gh CLI. 
-func CreatePR(ctx context.Context, task *Task, dir string, opts PROptions) (string, error) { - const op = "agentic.CreatePR" - - if task == nil { - return "", errors.E(op, "task is required", nil) - } - - // Build title if not provided - title := opts.Title - if title == "" { - title = task.Title - } - - // Build body if not provided - body := opts.Body - if body == "" { - body = buildPRBody(task) - } - - // Build gh command arguments - args := []string{"pr", "create", "--title", title, "--body", body} - - if opts.Draft { - args = append(args, "--draft") - } - - if opts.Base != "" { - args = append(args, "--base", opts.Base) - } - - for _, label := range opts.Labels { - args = append(args, "--label", label) - } - - // Run gh pr create - output, err := runCommandCtx(ctx, dir, "gh", args...) - if err != nil { - return "", errors.E(op, "failed to create PR", err) - } - - // Extract PR URL from output - prURL := strings.TrimSpace(output) - - return prURL, nil -} - -// buildPRBody creates a PR body from task details. -func buildPRBody(task *Task) string { - var sb strings.Builder - - sb.WriteString("## Summary\n\n") - sb.WriteString(task.Description) - sb.WriteString("\n\n") - - sb.WriteString("## Task Reference\n\n") - sb.WriteString("- Task ID: #") - sb.WriteString(task.ID) - sb.WriteString("\n") - sb.WriteString("- Priority: ") - sb.WriteString(string(task.Priority)) - sb.WriteString("\n") - - if len(task.Labels) > 0 { - sb.WriteString("- Labels: ") - sb.WriteString(strings.Join(task.Labels, ", ")) - sb.WriteString("\n") - } - - sb.WriteString("\n---\n") - sb.WriteString("Generated with AI assistance\n") - - return sb.String() -} - -// SyncStatus syncs the task status back to the agentic service. 
-func SyncStatus(ctx context.Context, client *Client, task *Task, update TaskUpdate) error { - const op = "agentic.SyncStatus" - - if client == nil { - return errors.E(op, "client is required", nil) - } - - if task == nil { - return errors.E(op, "task is required", nil) - } - - return client.UpdateTask(ctx, task.ID, update) -} - -// CommitAndSync commits changes and syncs task status. -func CommitAndSync(ctx context.Context, client *Client, task *Task, dir string, message string, progress int) error { - const op = "agentic.CommitAndSync" - - // Create commit - if err := AutoCommit(ctx, task, dir, message); err != nil { - return errors.E(op, "failed to commit", err) - } - - // Sync status if client provided - if client != nil { - update := TaskUpdate{ - Status: StatusInProgress, - Progress: progress, - Notes: "Committed: " + message, - } - - if err := SyncStatus(ctx, client, task, update); err != nil { - // Log but don't fail on sync errors - return errors.E(op, "commit succeeded but sync failed", err) - } - } - - return nil -} - -// PushChanges pushes committed changes to the remote. -func PushChanges(ctx context.Context, dir string) error { - const op = "agentic.PushChanges" - - _, err := runGitCommandCtx(ctx, dir, "push") - if err != nil { - return errors.E(op, "failed to push changes", err) - } - - return nil -} - -// CreateBranch creates a new branch for the task. -func CreateBranch(ctx context.Context, task *Task, dir string) (string, error) { - const op = "agentic.CreateBranch" - - if task == nil { - return "", errors.E(op, "task is required", nil) - } - - // Generate branch name from task - branchName := generateBranchName(task) - - // Create and checkout branch - _, err := runGitCommandCtx(ctx, dir, "checkout", "-b", branchName) - if err != nil { - return "", errors.E(op, "failed to create branch", err) - } - - return branchName, nil -} - -// generateBranchName creates a branch name from task details. 
-func generateBranchName(task *Task) string { - // Determine prefix based on labels - prefix := "feat" - for _, label := range task.Labels { - switch strings.ToLower(label) { - case "bug", "bugfix", "fix": - prefix = "fix" - case "docs", "documentation": - prefix = "docs" - case "refactor": - prefix = "refactor" - case "test", "tests": - prefix = "test" - case "chore": - prefix = "chore" - } - } - - // Sanitize title for branch name - title := strings.ToLower(task.Title) - title = strings.Map(func(r rune) rune { - if (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') { - return r - } - if r == ' ' || r == '-' || r == '_' { - return '-' - } - return -1 - }, title) - - // Remove consecutive dashes - for strings.Contains(title, "--") { - title = strings.ReplaceAll(title, "--", "-") - } - title = strings.Trim(title, "-") - - // Truncate if too long - if len(title) > 40 { - title = title[:40] - title = strings.TrimRight(title, "-") - } - - return fmt.Sprintf("%s/%s-%s", prefix, task.ID, title) -} - -// runGitCommandCtx runs a git command with context. -func runGitCommandCtx(ctx context.Context, dir string, args ...string) (string, error) { - return runCommandCtx(ctx, dir, "git", args...) -} - -// runCommandCtx runs an arbitrary command with context. -func runCommandCtx(ctx context.Context, dir string, command string, args ...string) (string, error) { - cmd := exec.CommandContext(ctx, command, args...) - cmd.Dir = dir - - var stdout, stderr bytes.Buffer - cmd.Stdout = &stdout - cmd.Stderr = &stderr - - if err := cmd.Run(); err != nil { - if stderr.Len() > 0 { - return "", fmt.Errorf("%w: %s", err, stderr.String()) - } - return "", err - } - - return stdout.String(), nil -} - -// GetCurrentBranch returns the current git branch name. 
-func GetCurrentBranch(ctx context.Context, dir string) (string, error) { - const op = "agentic.GetCurrentBranch" - - output, err := runGitCommandCtx(ctx, dir, "rev-parse", "--abbrev-ref", "HEAD") - if err != nil { - return "", errors.E(op, "failed to get current branch", err) - } - - return strings.TrimSpace(output), nil -} - -// HasUncommittedChanges checks if there are uncommitted changes. -func HasUncommittedChanges(ctx context.Context, dir string) (bool, error) { - const op = "agentic.HasUncommittedChanges" - - output, err := runGitCommandCtx(ctx, dir, "status", "--porcelain") - if err != nil { - return false, errors.E(op, "failed to get git status", err) - } - - return strings.TrimSpace(output) != "", nil -} - -// GetDiff returns the current diff for staged and unstaged changes. -func GetDiff(ctx context.Context, dir string, staged bool) (string, error) { - const op = "agentic.GetDiff" - - args := []string{"diff"} - if staged { - args = append(args, "--staged") - } - - output, err := runGitCommandCtx(ctx, dir, args...) 
- if err != nil { - return "", errors.E(op, "failed to get diff", err) - } - - return output, nil -} diff --git a/pkg/agentic/completion_test.go b/pkg/agentic/completion_test.go deleted file mode 100644 index 068b640..0000000 --- a/pkg/agentic/completion_test.go +++ /dev/null @@ -1,198 +0,0 @@ -package agentic - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestBuildCommitMessage(t *testing.T) { - task := &Task{ - ID: "ABC123", - Title: "Test Task", - } - - message := buildCommitMessage(task, "add new feature") - - assert.Contains(t, message, "add new feature") - assert.Contains(t, message, "Task: #ABC123") - assert.Contains(t, message, "Co-Authored-By: Claude ") -} - -func TestBuildPRBody(t *testing.T) { - task := &Task{ - ID: "PR-456", - Title: "Add authentication", - Description: "Implement user authentication with OAuth2", - Priority: PriorityHigh, - Labels: []string{"enhancement", "security"}, - } - - body := buildPRBody(task) - - assert.Contains(t, body, "## Summary") - assert.Contains(t, body, "Implement user authentication with OAuth2") - assert.Contains(t, body, "## Task Reference") - assert.Contains(t, body, "Task ID: #PR-456") - assert.Contains(t, body, "Priority: high") - assert.Contains(t, body, "Labels: enhancement, security") - assert.Contains(t, body, "Generated with AI assistance") -} - -func TestBuildPRBody_NoLabels(t *testing.T) { - task := &Task{ - ID: "PR-789", - Title: "Fix bug", - Description: "Fix the login bug", - Priority: PriorityMedium, - Labels: nil, - } - - body := buildPRBody(task) - - assert.Contains(t, body, "## Summary") - assert.Contains(t, body, "Fix the login bug") - assert.NotContains(t, body, "Labels:") -} - -func TestGenerateBranchName(t *testing.T) { - tests := []struct { - name string - task *Task - expected string - }{ - { - name: "feature task", - task: &Task{ - ID: "123", - Title: "Add user authentication", - Labels: []string{"enhancement"}, - }, - expected: "feat/123-add-user-authentication", 
- }, - { - name: "bug fix task", - task: &Task{ - ID: "456", - Title: "Fix login error", - Labels: []string{"bug"}, - }, - expected: "fix/456-fix-login-error", - }, - { - name: "docs task", - task: &Task{ - ID: "789", - Title: "Update README", - Labels: []string{"documentation"}, - }, - expected: "docs/789-update-readme", - }, - { - name: "refactor task", - task: &Task{ - ID: "101", - Title: "Refactor auth module", - Labels: []string{"refactor"}, - }, - expected: "refactor/101-refactor-auth-module", - }, - { - name: "test task", - task: &Task{ - ID: "202", - Title: "Add unit tests", - Labels: []string{"test"}, - }, - expected: "test/202-add-unit-tests", - }, - { - name: "chore task", - task: &Task{ - ID: "303", - Title: "Update dependencies", - Labels: []string{"chore"}, - }, - expected: "chore/303-update-dependencies", - }, - { - name: "long title truncated", - task: &Task{ - ID: "404", - Title: "This is a very long title that should be truncated to fit the branch name limit", - Labels: nil, - }, - expected: "feat/404-this-is-a-very-long-title-that-should-be", - }, - { - name: "special characters removed", - task: &Task{ - ID: "505", - Title: "Fix: user's auth (OAuth2) [important]", - Labels: nil, - }, - expected: "feat/505-fix-users-auth-oauth2-important", - }, - { - name: "no labels defaults to feat", - task: &Task{ - ID: "606", - Title: "New feature", - Labels: nil, - }, - expected: "feat/606-new-feature", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := generateBranchName(tt.task) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestAutoCommit_Bad_NilTask(t *testing.T) { - err := AutoCommit(nil, nil, ".", "test message") - assert.Error(t, err) - assert.Contains(t, err.Error(), "task is required") -} - -func TestAutoCommit_Bad_EmptyMessage(t *testing.T) { - task := &Task{ID: "123", Title: "Test"} - err := AutoCommit(nil, task, ".", "") - assert.Error(t, err) - assert.Contains(t, err.Error(), "commit message is 
required") -} - -func TestSyncStatus_Bad_NilClient(t *testing.T) { - task := &Task{ID: "123", Title: "Test"} - update := TaskUpdate{Status: StatusInProgress} - - err := SyncStatus(nil, nil, task, update) - assert.Error(t, err) - assert.Contains(t, err.Error(), "client is required") -} - -func TestSyncStatus_Bad_NilTask(t *testing.T) { - client := &Client{BaseURL: "http://test"} - update := TaskUpdate{Status: StatusInProgress} - - err := SyncStatus(nil, client, nil, update) - assert.Error(t, err) - assert.Contains(t, err.Error(), "task is required") -} - -func TestCreateBranch_Bad_NilTask(t *testing.T) { - branch, err := CreateBranch(nil, nil, ".") - assert.Error(t, err) - assert.Empty(t, branch) - assert.Contains(t, err.Error(), "task is required") -} - -func TestCreatePR_Bad_NilTask(t *testing.T) { - url, err := CreatePR(nil, nil, ".", PROptions{}) - assert.Error(t, err) - assert.Empty(t, url) - assert.Contains(t, err.Error(), "task is required") -} diff --git a/pkg/agentic/config.go b/pkg/agentic/config.go deleted file mode 100644 index 3ad088a..0000000 --- a/pkg/agentic/config.go +++ /dev/null @@ -1,199 +0,0 @@ -package agentic - -import ( - "bufio" - "os" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/errors" - "gopkg.in/yaml.v3" -) - -// Config holds the configuration for connecting to the core-agentic service. -type Config struct { - // BaseURL is the URL of the core-agentic API server. - BaseURL string `yaml:"base_url" json:"base_url"` - // Token is the authentication token for API requests. - Token string `yaml:"token" json:"token"` - // DefaultProject is the project to use when none is specified. - DefaultProject string `yaml:"default_project" json:"default_project"` - // AgentID is the identifier for this agent (optional, used for claiming tasks). - AgentID string `yaml:"agent_id" json:"agent_id"` -} - -// configFileName is the name of the YAML config file. 
-const configFileName = "agentic.yaml" - -// envFileName is the name of the environment file. -const envFileName = ".env" - -// DefaultBaseURL is the default API endpoint if none is configured. -const DefaultBaseURL = "https://api.core-agentic.dev" - -// LoadConfig loads the agentic configuration from the specified directory. -// It first checks for a .env file, then falls back to ~/.core/agentic.yaml. -// If dir is empty, it checks the current directory first. -// -// Environment variables take precedence: -// - AGENTIC_BASE_URL: API base URL -// - AGENTIC_TOKEN: Authentication token -// - AGENTIC_PROJECT: Default project -// - AGENTIC_AGENT_ID: Agent identifier -func LoadConfig(dir string) (*Config, error) { - cfg := &Config{ - BaseURL: DefaultBaseURL, - } - - // Try loading from .env file in the specified directory - if dir != "" { - envPath := filepath.Join(dir, envFileName) - if err := loadEnvFile(envPath, cfg); err == nil { - // Successfully loaded from .env - applyEnvOverrides(cfg) - if cfg.Token != "" { - return cfg, nil - } - } - } - - // Try loading from current directory .env - if dir == "" { - cwd, err := os.Getwd() - if err == nil { - envPath := filepath.Join(cwd, envFileName) - if err := loadEnvFile(envPath, cfg); err == nil { - applyEnvOverrides(cfg) - if cfg.Token != "" { - return cfg, nil - } - } - } - } - - // Try loading from ~/.core/agentic.yaml - homeDir, err := os.UserHomeDir() - if err != nil { - return nil, errors.E("agentic.LoadConfig", "failed to get home directory", err) - } - - configPath := filepath.Join(homeDir, ".core", configFileName) - if err := loadYAMLConfig(configPath, cfg); err != nil && !os.IsNotExist(err) { - return nil, errors.E("agentic.LoadConfig", "failed to load config", err) - } - - // Apply environment variable overrides - applyEnvOverrides(cfg) - - // Validate configuration - if cfg.Token == "" { - return nil, errors.E("agentic.LoadConfig", "no authentication token configured", nil) - } - - return cfg, nil -} - -// 
loadEnvFile reads a .env file and extracts agentic configuration. -func loadEnvFile(path string, cfg *Config) error { - file, err := os.Open(path) - if err != nil { - return err - } - defer file.Close() - - scanner := bufio.NewScanner(file) - for scanner.Scan() { - line := strings.TrimSpace(scanner.Text()) - - // Skip empty lines and comments - if line == "" || strings.HasPrefix(line, "#") { - continue - } - - // Parse KEY=value - parts := strings.SplitN(line, "=", 2) - if len(parts) != 2 { - continue - } - - key := strings.TrimSpace(parts[0]) - value := strings.TrimSpace(parts[1]) - - // Remove quotes if present - value = strings.Trim(value, `"'`) - - switch key { - case "AGENTIC_BASE_URL": - cfg.BaseURL = value - case "AGENTIC_TOKEN": - cfg.Token = value - case "AGENTIC_PROJECT": - cfg.DefaultProject = value - case "AGENTIC_AGENT_ID": - cfg.AgentID = value - } - } - - return scanner.Err() -} - -// loadYAMLConfig reads configuration from a YAML file. -func loadYAMLConfig(path string, cfg *Config) error { - data, err := os.ReadFile(path) - if err != nil { - return err - } - - return yaml.Unmarshal(data, cfg) -} - -// applyEnvOverrides applies environment variable overrides to the config. -func applyEnvOverrides(cfg *Config) { - if v := os.Getenv("AGENTIC_BASE_URL"); v != "" { - cfg.BaseURL = v - } - if v := os.Getenv("AGENTIC_TOKEN"); v != "" { - cfg.Token = v - } - if v := os.Getenv("AGENTIC_PROJECT"); v != "" { - cfg.DefaultProject = v - } - if v := os.Getenv("AGENTIC_AGENT_ID"); v != "" { - cfg.AgentID = v - } -} - -// SaveConfig saves the configuration to ~/.core/agentic.yaml. 
-func SaveConfig(cfg *Config) error { - homeDir, err := os.UserHomeDir() - if err != nil { - return errors.E("agentic.SaveConfig", "failed to get home directory", err) - } - - configDir := filepath.Join(homeDir, ".core") - if err := os.MkdirAll(configDir, 0755); err != nil { - return errors.E("agentic.SaveConfig", "failed to create config directory", err) - } - - configPath := filepath.Join(configDir, configFileName) - - data, err := yaml.Marshal(cfg) - if err != nil { - return errors.E("agentic.SaveConfig", "failed to marshal config", err) - } - - if err := os.WriteFile(configPath, data, 0600); err != nil { - return errors.E("agentic.SaveConfig", "failed to write config file", err) - } - - return nil -} - -// ConfigPath returns the path to the config file in the user's home directory. -func ConfigPath() (string, error) { - homeDir, err := os.UserHomeDir() - if err != nil { - return "", errors.E("agentic.ConfigPath", "failed to get home directory", err) - } - return filepath.Join(homeDir, ".core", configFileName), nil -} diff --git a/pkg/agentic/config_test.go b/pkg/agentic/config_test.go deleted file mode 100644 index 6e88478..0000000 --- a/pkg/agentic/config_test.go +++ /dev/null @@ -1,185 +0,0 @@ -package agentic - -import ( - "os" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestLoadConfig_Good_FromEnvFile(t *testing.T) { - // Create temp directory with .env file - tmpDir, err := os.MkdirTemp("", "agentic-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - envContent := ` -AGENTIC_BASE_URL=https://test.api.com -AGENTIC_TOKEN=test-token-123 -AGENTIC_PROJECT=my-project -AGENTIC_AGENT_ID=agent-001 -` - err = os.WriteFile(filepath.Join(tmpDir, ".env"), []byte(envContent), 0644) - require.NoError(t, err) - - cfg, err := LoadConfig(tmpDir) - - require.NoError(t, err) - assert.Equal(t, "https://test.api.com", cfg.BaseURL) - assert.Equal(t, "test-token-123", cfg.Token) - 
assert.Equal(t, "my-project", cfg.DefaultProject) - assert.Equal(t, "agent-001", cfg.AgentID) -} - -func TestLoadConfig_Good_FromEnvVars(t *testing.T) { - // Create temp directory with .env file (partial config) - tmpDir, err := os.MkdirTemp("", "agentic-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - envContent := ` -AGENTIC_TOKEN=env-file-token -` - err = os.WriteFile(filepath.Join(tmpDir, ".env"), []byte(envContent), 0644) - require.NoError(t, err) - - // Set environment variables that should override - os.Setenv("AGENTIC_BASE_URL", "https://env-override.com") - os.Setenv("AGENTIC_TOKEN", "env-override-token") - defer func() { - os.Unsetenv("AGENTIC_BASE_URL") - os.Unsetenv("AGENTIC_TOKEN") - }() - - cfg, err := LoadConfig(tmpDir) - - require.NoError(t, err) - assert.Equal(t, "https://env-override.com", cfg.BaseURL) - assert.Equal(t, "env-override-token", cfg.Token) -} - -func TestLoadConfig_Bad_NoToken(t *testing.T) { - // Create temp directory without config - tmpDir, err := os.MkdirTemp("", "agentic-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - // Create empty .env - err = os.WriteFile(filepath.Join(tmpDir, ".env"), []byte(""), 0644) - require.NoError(t, err) - - // Ensure no env vars are set - os.Unsetenv("AGENTIC_TOKEN") - os.Unsetenv("AGENTIC_BASE_URL") - - _, err = LoadConfig(tmpDir) - - assert.Error(t, err) - assert.Contains(t, err.Error(), "no authentication token") -} - -func TestLoadConfig_Good_EnvFileWithQuotes(t *testing.T) { - tmpDir, err := os.MkdirTemp("", "agentic-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - // Test with quoted values - envContent := ` -AGENTIC_TOKEN="quoted-token" -AGENTIC_BASE_URL='single-quoted-url' -` - err = os.WriteFile(filepath.Join(tmpDir, ".env"), []byte(envContent), 0644) - require.NoError(t, err) - - cfg, err := LoadConfig(tmpDir) - - require.NoError(t, err) - assert.Equal(t, "quoted-token", cfg.Token) - assert.Equal(t, "single-quoted-url", cfg.BaseURL) -} - 
-func TestLoadConfig_Good_EnvFileWithComments(t *testing.T) { - tmpDir, err := os.MkdirTemp("", "agentic-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - envContent := ` -# This is a comment -AGENTIC_TOKEN=token-with-comments - -# Another comment -AGENTIC_PROJECT=commented-project -` - err = os.WriteFile(filepath.Join(tmpDir, ".env"), []byte(envContent), 0644) - require.NoError(t, err) - - cfg, err := LoadConfig(tmpDir) - - require.NoError(t, err) - assert.Equal(t, "token-with-comments", cfg.Token) - assert.Equal(t, "commented-project", cfg.DefaultProject) -} - -func TestSaveConfig_Good(t *testing.T) { - // Create temp home directory - tmpHome, err := os.MkdirTemp("", "agentic-home") - require.NoError(t, err) - defer os.RemoveAll(tmpHome) - - // Override HOME for the test - originalHome := os.Getenv("HOME") - os.Setenv("HOME", tmpHome) - defer os.Setenv("HOME", originalHome) - - cfg := &Config{ - BaseURL: "https://saved.api.com", - Token: "saved-token", - DefaultProject: "saved-project", - AgentID: "saved-agent", - } - - err = SaveConfig(cfg) - require.NoError(t, err) - - // Verify file was created - configPath := filepath.Join(tmpHome, ".core", "agentic.yaml") - _, err = os.Stat(configPath) - assert.NoError(t, err) - - // Read back the config - data, err := os.ReadFile(configPath) - require.NoError(t, err) - assert.Contains(t, string(data), "saved.api.com") - assert.Contains(t, string(data), "saved-token") -} - -func TestConfigPath_Good(t *testing.T) { - path, err := ConfigPath() - - require.NoError(t, err) - assert.Contains(t, path, ".core") - assert.Contains(t, path, "agentic.yaml") -} - -func TestLoadConfig_Good_DefaultBaseURL(t *testing.T) { - tmpDir, err := os.MkdirTemp("", "agentic-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - // Only provide token, should use default base URL - envContent := ` -AGENTIC_TOKEN=test-token -` - err = os.WriteFile(filepath.Join(tmpDir, ".env"), []byte(envContent), 0644) - require.NoError(t, 
err) - - // Clear any env overrides - os.Unsetenv("AGENTIC_BASE_URL") - - cfg, err := LoadConfig(tmpDir) - - require.NoError(t, err) - assert.Equal(t, DefaultBaseURL, cfg.BaseURL) -} diff --git a/pkg/agentic/context.go b/pkg/agentic/context.go deleted file mode 100644 index a31ba63..0000000 --- a/pkg/agentic/context.go +++ /dev/null @@ -1,335 +0,0 @@ -// Package agentic provides AI collaboration features for task management. -package agentic - -import ( - "bytes" - "os" - "os/exec" - "path/filepath" - "regexp" - "strings" - - "github.com/host-uk/core/pkg/errors" -) - -// FileContent represents the content of a file for AI context. -type FileContent struct { - // Path is the relative path to the file. - Path string `json:"path"` - // Content is the file content. - Content string `json:"content"` - // Language is the detected programming language. - Language string `json:"language"` -} - -// TaskContext contains gathered context for AI collaboration. -type TaskContext struct { - // Task is the task being worked on. - Task *Task `json:"task"` - // Files is a list of relevant file contents. - Files []FileContent `json:"files"` - // GitStatus is the current git status output. - GitStatus string `json:"git_status"` - // RecentCommits is the recent commit log. - RecentCommits string `json:"recent_commits"` - // RelatedCode contains code snippets related to the task. - RelatedCode []FileContent `json:"related_code"` -} - -// BuildTaskContext gathers context for AI collaboration on a task. 
-func BuildTaskContext(task *Task, dir string) (*TaskContext, error) { - const op = "agentic.BuildTaskContext" - - if task == nil { - return nil, errors.E(op, "task is required", nil) - } - - if dir == "" { - cwd, err := os.Getwd() - if err != nil { - return nil, errors.E(op, "failed to get working directory", err) - } - dir = cwd - } - - ctx := &TaskContext{ - Task: task, - } - - // Gather files mentioned in the task - files, err := GatherRelatedFiles(task, dir) - if err != nil { - // Non-fatal: continue without files - files = nil - } - ctx.Files = files - - // Get git status - gitStatus, _ := runGitCommand(dir, "status", "--porcelain") - ctx.GitStatus = gitStatus - - // Get recent commits - recentCommits, _ := runGitCommand(dir, "log", "--oneline", "-10") - ctx.RecentCommits = recentCommits - - // Find related code by searching for keywords - relatedCode, err := findRelatedCode(task, dir) - if err != nil { - relatedCode = nil - } - ctx.RelatedCode = relatedCode - - return ctx, nil -} - -// GatherRelatedFiles reads files mentioned in the task. -func GatherRelatedFiles(task *Task, dir string) ([]FileContent, error) { - const op = "agentic.GatherRelatedFiles" - - if task == nil { - return nil, errors.E(op, "task is required", nil) - } - - var files []FileContent - - // Read files explicitly mentioned in the task - for _, relPath := range task.Files { - fullPath := filepath.Join(dir, relPath) - - content, err := os.ReadFile(fullPath) - if err != nil { - // Skip files that don't exist - continue - } - - files = append(files, FileContent{ - Path: relPath, - Content: string(content), - Language: detectLanguage(relPath), - }) - } - - return files, nil -} - -// findRelatedCode searches for code related to the task by keywords. 
-func findRelatedCode(task *Task, dir string) ([]FileContent, error) { - const op = "agentic.findRelatedCode" - - if task == nil { - return nil, errors.E(op, "task is required", nil) - } - - // Extract keywords from title and description - keywords := extractKeywords(task.Title + " " + task.Description) - if len(keywords) == 0 { - return nil, nil - } - - var files []FileContent - seen := make(map[string]bool) - - // Search for each keyword using git grep - for _, keyword := range keywords { - if len(keyword) < 3 { - continue - } - - output, err := runGitCommand(dir, "grep", "-l", "-i", keyword, "--", "*.go", "*.ts", "*.js", "*.py") - if err != nil { - continue - } - - // Parse matched files - for _, line := range strings.Split(output, "\n") { - line = strings.TrimSpace(line) - if line == "" || seen[line] { - continue - } - seen[line] = true - - // Limit to 10 related files - if len(files) >= 10 { - break - } - - fullPath := filepath.Join(dir, line) - content, err := os.ReadFile(fullPath) - if err != nil { - continue - } - - // Truncate large files - contentStr := string(content) - if len(contentStr) > 5000 { - contentStr = contentStr[:5000] + "\n... (truncated)" - } - - files = append(files, FileContent{ - Path: line, - Content: contentStr, - Language: detectLanguage(line), - }) - } - - if len(files) >= 10 { - break - } - } - - return files, nil -} - -// extractKeywords extracts meaningful words from text for searching. 
-func extractKeywords(text string) []string { - // Remove common words and extract identifiers - text = strings.ToLower(text) - - // Split by non-alphanumeric characters - re := regexp.MustCompile(`[^a-zA-Z0-9]+`) - words := re.Split(text, -1) - - // Filter stop words and short words - stopWords := map[string]bool{ - "the": true, "a": true, "an": true, "and": true, "or": true, "but": true, - "in": true, "on": true, "at": true, "to": true, "for": true, "of": true, - "with": true, "by": true, "from": true, "is": true, "are": true, "was": true, - "be": true, "been": true, "being": true, "have": true, "has": true, "had": true, - "do": true, "does": true, "did": true, "will": true, "would": true, "could": true, - "should": true, "may": true, "might": true, "must": true, "shall": true, - "this": true, "that": true, "these": true, "those": true, "it": true, - "add": true, "create": true, "update": true, "fix": true, "remove": true, - "implement": true, "new": true, "file": true, "code": true, - } - - var keywords []string - for _, word := range words { - word = strings.TrimSpace(word) - if len(word) >= 3 && !stopWords[word] { - keywords = append(keywords, word) - } - } - - // Limit to first 5 keywords - if len(keywords) > 5 { - keywords = keywords[:5] - } - - return keywords -} - -// detectLanguage detects the programming language from a file extension. 
-func detectLanguage(path string) string { - ext := strings.ToLower(filepath.Ext(path)) - - languages := map[string]string{ - ".go": "go", - ".ts": "typescript", - ".tsx": "typescript", - ".js": "javascript", - ".jsx": "javascript", - ".py": "python", - ".rs": "rust", - ".java": "java", - ".kt": "kotlin", - ".swift": "swift", - ".c": "c", - ".cpp": "cpp", - ".h": "c", - ".hpp": "cpp", - ".rb": "ruby", - ".php": "php", - ".cs": "csharp", - ".fs": "fsharp", - ".scala": "scala", - ".sh": "bash", - ".bash": "bash", - ".zsh": "zsh", - ".yaml": "yaml", - ".yml": "yaml", - ".json": "json", - ".xml": "xml", - ".html": "html", - ".css": "css", - ".scss": "scss", - ".sql": "sql", - ".md": "markdown", - } - - if lang, ok := languages[ext]; ok { - return lang - } - return "text" -} - -// runGitCommand runs a git command and returns the output. -func runGitCommand(dir string, args ...string) (string, error) { - cmd := exec.Command("git", args...) - cmd.Dir = dir - - var stdout, stderr bytes.Buffer - cmd.Stdout = &stdout - cmd.Stderr = &stderr - - if err := cmd.Run(); err != nil { - return "", err - } - - return stdout.String(), nil -} - -// FormatContext formats the TaskContext for AI consumption. 
-func (tc *TaskContext) FormatContext() string { - var sb strings.Builder - - sb.WriteString("# Task Context\n\n") - - // Task info - sb.WriteString("## Task\n") - sb.WriteString("ID: " + tc.Task.ID + "\n") - sb.WriteString("Title: " + tc.Task.Title + "\n") - sb.WriteString("Priority: " + string(tc.Task.Priority) + "\n") - sb.WriteString("Status: " + string(tc.Task.Status) + "\n") - sb.WriteString("\n### Description\n") - sb.WriteString(tc.Task.Description + "\n\n") - - // Files - if len(tc.Files) > 0 { - sb.WriteString("## Task Files\n") - for _, f := range tc.Files { - sb.WriteString("### " + f.Path + " (" + f.Language + ")\n") - sb.WriteString("```" + f.Language + "\n") - sb.WriteString(f.Content) - sb.WriteString("\n```\n\n") - } - } - - // Git status - if tc.GitStatus != "" { - sb.WriteString("## Git Status\n") - sb.WriteString("```\n") - sb.WriteString(tc.GitStatus) - sb.WriteString("\n```\n\n") - } - - // Recent commits - if tc.RecentCommits != "" { - sb.WriteString("## Recent Commits\n") - sb.WriteString("```\n") - sb.WriteString(tc.RecentCommits) - sb.WriteString("\n```\n\n") - } - - // Related code - if len(tc.RelatedCode) > 0 { - sb.WriteString("## Related Code\n") - for _, f := range tc.RelatedCode { - sb.WriteString("### " + f.Path + " (" + f.Language + ")\n") - sb.WriteString("```" + f.Language + "\n") - sb.WriteString(f.Content) - sb.WriteString("\n```\n\n") - } - } - - return sb.String() -} diff --git a/pkg/agentic/context_test.go b/pkg/agentic/context_test.go deleted file mode 100644 index 5ef102d..0000000 --- a/pkg/agentic/context_test.go +++ /dev/null @@ -1,214 +0,0 @@ -package agentic - -import ( - "os" - "path/filepath" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestBuildTaskContext_Good(t *testing.T) { - // Create a temp directory with some files - tmpDir := t.TempDir() - - // Create a test file - testFile := filepath.Join(tmpDir, "main.go") - err := os.WriteFile(testFile, 
[]byte("package main\n\nfunc main() {}\n"), 0644) - require.NoError(t, err) - - task := &Task{ - ID: "test-123", - Title: "Test Task", - Description: "A test task description", - Priority: PriorityMedium, - Status: StatusPending, - Files: []string{"main.go"}, - CreatedAt: time.Now(), - } - - ctx, err := BuildTaskContext(task, tmpDir) - require.NoError(t, err) - assert.NotNil(t, ctx) - assert.Equal(t, task, ctx.Task) - assert.Len(t, ctx.Files, 1) - assert.Equal(t, "main.go", ctx.Files[0].Path) - assert.Equal(t, "go", ctx.Files[0].Language) -} - -func TestBuildTaskContext_Bad_NilTask(t *testing.T) { - ctx, err := BuildTaskContext(nil, ".") - assert.Error(t, err) - assert.Nil(t, ctx) - assert.Contains(t, err.Error(), "task is required") -} - -func TestGatherRelatedFiles_Good(t *testing.T) { - tmpDir := t.TempDir() - - // Create test files - files := map[string]string{ - "app.go": "package app\n\nfunc Run() {}\n", - "config.ts": "export const config = {};\n", - "README.md": "# Project\n", - } - - for name, content := range files { - path := filepath.Join(tmpDir, name) - err := os.WriteFile(path, []byte(content), 0644) - require.NoError(t, err) - } - - task := &Task{ - ID: "task-1", - Title: "Test", - Files: []string{"app.go", "config.ts"}, - } - - gathered, err := GatherRelatedFiles(task, tmpDir) - require.NoError(t, err) - assert.Len(t, gathered, 2) - - // Check languages detected correctly - foundGo := false - foundTS := false - for _, f := range gathered { - if f.Path == "app.go" { - foundGo = true - assert.Equal(t, "go", f.Language) - } - if f.Path == "config.ts" { - foundTS = true - assert.Equal(t, "typescript", f.Language) - } - } - assert.True(t, foundGo, "should find app.go") - assert.True(t, foundTS, "should find config.ts") -} - -func TestGatherRelatedFiles_Bad_NilTask(t *testing.T) { - files, err := GatherRelatedFiles(nil, ".") - assert.Error(t, err) - assert.Nil(t, files) -} - -func TestGatherRelatedFiles_Good_MissingFiles(t *testing.T) { - tmpDir := 
t.TempDir() - - task := &Task{ - ID: "task-1", - Title: "Test", - Files: []string{"nonexistent.go", "also-missing.ts"}, - } - - // Should not error, just return empty list - gathered, err := GatherRelatedFiles(task, tmpDir) - require.NoError(t, err) - assert.Empty(t, gathered) -} - -func TestDetectLanguage(t *testing.T) { - tests := []struct { - path string - expected string - }{ - {"main.go", "go"}, - {"app.ts", "typescript"}, - {"app.tsx", "typescript"}, - {"script.js", "javascript"}, - {"script.jsx", "javascript"}, - {"main.py", "python"}, - {"lib.rs", "rust"}, - {"App.java", "java"}, - {"config.yaml", "yaml"}, - {"config.yml", "yaml"}, - {"data.json", "json"}, - {"index.html", "html"}, - {"styles.css", "css"}, - {"styles.scss", "scss"}, - {"query.sql", "sql"}, - {"README.md", "markdown"}, - {"unknown.xyz", "text"}, - {"", "text"}, - } - - for _, tt := range tests { - t.Run(tt.path, func(t *testing.T) { - result := detectLanguage(tt.path) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestExtractKeywords(t *testing.T) { - tests := []struct { - name string - text string - expected int // minimum number of keywords expected - }{ - { - name: "simple title", - text: "Add user authentication feature", - expected: 2, - }, - { - name: "with stop words", - text: "The quick brown fox jumps over the lazy dog", - expected: 3, - }, - { - name: "technical text", - text: "Implement OAuth2 authentication with JWT tokens", - expected: 3, - }, - { - name: "empty", - text: "", - expected: 0, - }, - { - name: "only stop words", - text: "the a an and or but in on at", - expected: 0, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - keywords := extractKeywords(tt.text) - assert.GreaterOrEqual(t, len(keywords), tt.expected) - // Keywords should not exceed 5 - assert.LessOrEqual(t, len(keywords), 5) - }) - } -} - -func TestTaskContext_FormatContext(t *testing.T) { - task := &Task{ - ID: "test-456", - Title: "Test Formatting", - Description: 
"This is a test description", - Priority: PriorityHigh, - Status: StatusInProgress, - } - - ctx := &TaskContext{ - Task: task, - Files: []FileContent{{Path: "main.go", Content: "package main", Language: "go"}}, - GitStatus: " M main.go", - RecentCommits: "abc123 Initial commit", - RelatedCode: []FileContent{{Path: "util.go", Content: "package util", Language: "go"}}, - } - - formatted := ctx.FormatContext() - - assert.Contains(t, formatted, "# Task Context") - assert.Contains(t, formatted, "test-456") - assert.Contains(t, formatted, "Test Formatting") - assert.Contains(t, formatted, "## Task Files") - assert.Contains(t, formatted, "## Git Status") - assert.Contains(t, formatted, "## Recent Commits") - assert.Contains(t, formatted, "## Related Code") -} diff --git a/pkg/agentic/embed.go b/pkg/agentic/embed.go deleted file mode 100644 index 4dd8886..0000000 --- a/pkg/agentic/embed.go +++ /dev/null @@ -1,19 +0,0 @@ -package agentic - -import ( - "embed" - "strings" -) - -//go:embed prompts/*.md -var promptsFS embed.FS - -// Prompt returns the content of an embedded prompt file. -// Name should be without the .md extension (e.g., "commit"). -func Prompt(name string) string { - data, err := promptsFS.ReadFile("prompts/" + name + ".md") - if err != nil { - return "" - } - return strings.TrimSpace(string(data)) -} diff --git a/pkg/agentic/prompts/commit.md b/pkg/agentic/prompts/commit.md deleted file mode 100644 index 58d6d7b..0000000 --- a/pkg/agentic/prompts/commit.md +++ /dev/null @@ -1,44 +0,0 @@ -# Commit Instructions - -## Context - -When asked to commit changes in these repositories: - -1. **All changes are pre-approved** - Both tracked (modified) and untracked files have been reviewed and approved for commit -2. **Include everything** - Commit ALL modified files AND all untracked files (including new directories) -3. **Don't skip files** - Do not leave out untracked files assuming they need separate review -4. 
**Single commit preferred** - Combine all changes into one cohesive commit unless explicitly told otherwise - -The user has already validated these changes. Proceed with confidence. - -## Handling Ignored Files - -If you see untracked directories that should typically be ignored (like `node_modules/`, `vendor/`, `.cache/`, `dist/`, `build/`): - -1. **Fix the .gitignore** - Create or update `.gitignore` to exclude these directories -2. **Commit the .gitignore** - Include this fix in your commit -3. **Don't ask** - Just fix it and commit - -Common patterns to add to .gitignore: -``` -node_modules/ -vendor/ -.cache/ -dist/ -build/ -*.log -.env -.DS_Store -``` - -## Commit Message Style - -- Use conventional commit format: `type(scope): description` -- Common types: `refactor`, `feat`, `fix`, `docs`, `chore` -- Keep the first line under 72 characters -- Add body for complex changes explaining the "why" -- Include `Co-Authored-By: Claude Opus 4.5 ` - -## Task - -Review the uncommitted changes and create an appropriate commit. Be concise. diff --git a/pkg/agentic/service.go b/pkg/agentic/service.go deleted file mode 100644 index 1136474..0000000 --- a/pkg/agentic/service.go +++ /dev/null @@ -1,112 +0,0 @@ -package agentic - -import ( - "context" - "os" - "os/exec" - "strings" - - "github.com/host-uk/core/pkg/framework" -) - -// Tasks for AI service - -// TaskCommit requests Claude to create a commit. -type TaskCommit struct { - Path string - Name string - CanEdit bool // allow Write/Edit tools -} - -// TaskPrompt sends a custom prompt to Claude. -type TaskPrompt struct { - Prompt string - WorkDir string - AllowedTools []string -} - -// ServiceOptions for configuring the AI service. -type ServiceOptions struct { - DefaultTools []string - AllowEdit bool // global permission for Write/Edit tools -} - -// DefaultServiceOptions returns sensible defaults. 
-func DefaultServiceOptions() ServiceOptions { - return ServiceOptions{ - DefaultTools: []string{"Bash", "Read", "Glob", "Grep"}, - AllowEdit: false, - } -} - -// Service provides AI/Claude operations as a Core service. -type Service struct { - *framework.ServiceRuntime[ServiceOptions] -} - -// NewService creates an AI service factory. -func NewService(opts ServiceOptions) func(*framework.Core) (any, error) { - return func(c *framework.Core) (any, error) { - return &Service{ - ServiceRuntime: framework.NewServiceRuntime(c, opts), - }, nil - } -} - -// OnStartup registers task handlers. -func (s *Service) OnStartup(ctx context.Context) error { - s.Core().RegisterTask(s.handleTask) - return nil -} - -func (s *Service) handleTask(c *framework.Core, t framework.Task) (any, bool, error) { - switch m := t.(type) { - case TaskCommit: - err := s.doCommit(m) - return nil, true, err - - case TaskPrompt: - err := s.doPrompt(m) - return nil, true, err - } - return nil, false, nil -} - -func (s *Service) doCommit(task TaskCommit) error { - prompt := Prompt("commit") - - tools := []string{"Bash", "Read", "Glob", "Grep"} - if task.CanEdit { - tools = []string{"Bash", "Read", "Write", "Edit", "Glob", "Grep"} - } - - cmd := exec.CommandContext(context.Background(), "claude", "-p", prompt, "--allowedTools", strings.Join(tools, ",")) - cmd.Dir = task.Path - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - cmd.Stdin = os.Stdin - - return cmd.Run() -} - -func (s *Service) doPrompt(task TaskPrompt) error { - opts := s.Opts() - tools := opts.DefaultTools - if len(tools) == 0 { - tools = []string{"Bash", "Read", "Glob", "Grep"} - } - - if len(task.AllowedTools) > 0 { - tools = task.AllowedTools - } - - cmd := exec.CommandContext(context.Background(), "claude", "-p", task.Prompt, "--allowedTools", strings.Join(tools, ",")) - if task.WorkDir != "" { - cmd.Dir = task.WorkDir - } - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - cmd.Stdin = os.Stdin - - return cmd.Run() -} diff --git 
a/pkg/agentic/types.go b/pkg/agentic/types.go deleted file mode 100644 index 53fc480..0000000 --- a/pkg/agentic/types.go +++ /dev/null @@ -1,140 +0,0 @@ -// Package agentic provides an API client for core-agentic, an AI-assisted task -// management service. It enables developers and AI agents to discover, claim, -// and complete development tasks. -package agentic - -import ( - "time" -) - -// TaskStatus represents the state of a task in the system. -type TaskStatus string - -const ( - // StatusPending indicates the task is available to be claimed. - StatusPending TaskStatus = "pending" - // StatusInProgress indicates the task has been claimed and is being worked on. - StatusInProgress TaskStatus = "in_progress" - // StatusCompleted indicates the task has been successfully completed. - StatusCompleted TaskStatus = "completed" - // StatusBlocked indicates the task cannot proceed due to dependencies. - StatusBlocked TaskStatus = "blocked" -) - -// TaskPriority represents the urgency level of a task. -type TaskPriority string - -const ( - // PriorityCritical indicates the task requires immediate attention. - PriorityCritical TaskPriority = "critical" - // PriorityHigh indicates the task is important and should be addressed soon. - PriorityHigh TaskPriority = "high" - // PriorityMedium indicates the task has normal priority. - PriorityMedium TaskPriority = "medium" - // PriorityLow indicates the task can be addressed when time permits. - PriorityLow TaskPriority = "low" -) - -// Task represents a development task in the core-agentic system. -type Task struct { - // ID is the unique identifier for the task. - ID string `json:"id"` - // Title is the short description of the task. - Title string `json:"title"` - // Description provides detailed information about what needs to be done. - Description string `json:"description"` - // Priority indicates the urgency of the task. - Priority TaskPriority `json:"priority"` - // Status indicates the current state of the task. 
- Status TaskStatus `json:"status"` - // Labels are tags used to categorize the task. - Labels []string `json:"labels,omitempty"` - // Files lists the files that are relevant to this task. - Files []string `json:"files,omitempty"` - // CreatedAt is when the task was created. - CreatedAt time.Time `json:"created_at"` - // UpdatedAt is when the task was last modified. - UpdatedAt time.Time `json:"updated_at,omitempty"` - // ClaimedBy is the identifier of the agent or developer who claimed the task. - ClaimedBy string `json:"claimed_by,omitempty"` - // ClaimedAt is when the task was claimed. - ClaimedAt *time.Time `json:"claimed_at,omitempty"` - // Project is the project this task belongs to. - Project string `json:"project,omitempty"` - // Dependencies lists task IDs that must be completed before this task. - Dependencies []string `json:"dependencies,omitempty"` - // Blockers lists task IDs that this task is blocking. - Blockers []string `json:"blockers,omitempty"` -} - -// TaskUpdate contains fields that can be updated on a task. -type TaskUpdate struct { - // Status is the new status for the task. - Status TaskStatus `json:"status,omitempty"` - // Progress is a percentage (0-100) indicating completion. - Progress int `json:"progress,omitempty"` - // Notes are additional comments about the update. - Notes string `json:"notes,omitempty"` -} - -// TaskResult contains the outcome of a completed task. -type TaskResult struct { - // Success indicates whether the task was completed successfully. - Success bool `json:"success"` - // Output is the result or summary of the completed work. - Output string `json:"output,omitempty"` - // Artifacts are files or resources produced by the task. - Artifacts []string `json:"artifacts,omitempty"` - // ErrorMessage contains details if the task failed. - ErrorMessage string `json:"error_message,omitempty"` -} - -// ListOptions specifies filters for listing tasks. 
-type ListOptions struct { - // Status filters tasks by their current status. - Status TaskStatus `json:"status,omitempty"` - // Labels filters tasks that have all specified labels. - Labels []string `json:"labels,omitempty"` - // Priority filters tasks by priority level. - Priority TaskPriority `json:"priority,omitempty"` - // Limit is the maximum number of tasks to return. - Limit int `json:"limit,omitempty"` - // Project filters tasks by project. - Project string `json:"project,omitempty"` - // ClaimedBy filters tasks claimed by a specific agent. - ClaimedBy string `json:"claimed_by,omitempty"` -} - -// APIError represents an error response from the API. -type APIError struct { - // Code is the HTTP status code. - Code int `json:"code"` - // Message is the error description. - Message string `json:"message"` - // Details provides additional context about the error. - Details string `json:"details,omitempty"` -} - -// Error implements the error interface for APIError. -func (e *APIError) Error() string { - if e.Details != "" { - return e.Message + ": " + e.Details - } - return e.Message -} - -// ClaimResponse is returned when a task is successfully claimed. -type ClaimResponse struct { - // Task is the claimed task with updated fields. - Task *Task `json:"task"` - // Message provides additional context about the claim. - Message string `json:"message,omitempty"` -} - -// CompleteResponse is returned when a task is completed. -type CompleteResponse struct { - // Task is the completed task with final status. - Task *Task `json:"task"` - // Message provides additional context about the completion. - Message string `json:"message,omitempty"` -} diff --git a/pkg/ai/cmd_ai.go b/pkg/ai/cmd_ai.go deleted file mode 100644 index 136006b..0000000 --- a/pkg/ai/cmd_ai.go +++ /dev/null @@ -1,49 +0,0 @@ -// cmd_ai.go defines styles and the AddAgenticCommands function for AI task management. 
- -package ai - -import ( - "github.com/host-uk/core/pkg/cli" -) - -// Style aliases from shared package -var ( - successStyle = cli.SuccessStyle - errorStyle = cli.ErrorStyle - dimStyle = cli.DimStyle - truncate = cli.Truncate - formatAge = cli.FormatAge -) - -// Task priority/status styles from shared -var ( - taskPriorityHighStyle = cli.NewStyle().Foreground(cli.ColourRed500) - taskPriorityMediumStyle = cli.NewStyle().Foreground(cli.ColourAmber500) - taskPriorityLowStyle = cli.NewStyle().Foreground(cli.ColourBlue400) - taskStatusPendingStyle = cli.DimStyle - taskStatusInProgressStyle = cli.NewStyle().Foreground(cli.ColourBlue500) - taskStatusCompletedStyle = cli.SuccessStyle - taskStatusBlockedStyle = cli.ErrorStyle -) - -// Task-specific styles (aliases to shared where possible) -var ( - taskIDStyle = cli.TitleStyle // Bold + blue - taskTitleStyle = cli.ValueStyle // Light gray - taskLabelStyle = cli.NewStyle().Foreground(cli.ColourViolet500) // Violet for labels -) - -// AddAgenticCommands adds the agentic task management commands to the ai command. -func AddAgenticCommands(parent *cli.Command) { - // Task listing and viewing - addTasksCommand(parent) - addTaskCommand(parent) - - // Task updates - addTaskUpdateCommand(parent) - addTaskCompleteCommand(parent) - - // Git integration - addTaskCommitCommand(parent) - addTaskPRCommand(parent) -} diff --git a/pkg/ai/cmd_commands.go b/pkg/ai/cmd_commands.go deleted file mode 100644 index 45e5aaf..0000000 --- a/pkg/ai/cmd_commands.go +++ /dev/null @@ -1,76 +0,0 @@ -// Package ai provides AI agent task management and Claude Code integration. 
-// -// Commands: -// - tasks: List tasks from the agentic service -// - task: View, claim, or auto-select tasks -// - task:update: Update task status and progress -// - task:complete: Mark tasks as completed or failed -// - task:commit: Create commits with task references -// - task:pr: Create pull requests linked to tasks -// - claude: Claude Code CLI integration (planned) -package ai - -import ( - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -func init() { - cli.RegisterCommands(AddAICommands) -} - -var aiCmd = &cli.Command{ - Use: "ai", - Short: i18n.T("cmd.ai.short"), - Long: i18n.T("cmd.ai.long"), -} - -var claudeCmd = &cli.Command{ - Use: "claude", - Short: i18n.T("cmd.ai.claude.short"), - Long: i18n.T("cmd.ai.claude.long"), -} - -var claudeRunCmd = &cli.Command{ - Use: "run", - Short: i18n.T("cmd.ai.claude.run.short"), - RunE: func(cmd *cli.Command, args []string) error { - return runClaudeCode() - }, -} - -var claudeConfigCmd = &cli.Command{ - Use: "config", - Short: i18n.T("cmd.ai.claude.config.short"), - RunE: func(cmd *cli.Command, args []string) error { - return showClaudeConfig() - }, -} - -func initCommands() { - // Add Claude subcommands - claudeCmd.AddCommand(claudeRunCmd) - claudeCmd.AddCommand(claudeConfigCmd) - - // Add Claude command to ai - aiCmd.AddCommand(claudeCmd) - - // Add agentic task commands - AddAgenticCommands(aiCmd) -} - -// AddAICommands registers the 'ai' command and all subcommands. -func AddAICommands(root *cli.Command) { - initCommands() - root.AddCommand(aiCmd) -} - -func runClaudeCode() error { - // Placeholder - will integrate with claude CLI - return nil -} - -func showClaudeConfig() error { - // Placeholder - will show claude configuration - return nil -} diff --git a/pkg/ai/cmd_git.go b/pkg/ai/cmd_git.go deleted file mode 100644 index 6a7a17c..0000000 --- a/pkg/ai/cmd_git.go +++ /dev/null @@ -1,248 +0,0 @@ -// cmd_git.go implements git integration commands for task commits and PRs. 
- -package ai - -import ( - "bytes" - "context" - "os" - "os/exec" - "strings" - "time" - - "github.com/host-uk/core/pkg/agentic" - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -// task:commit command flags -var ( - taskCommitMessage string - taskCommitScope string - taskCommitPush bool -) - -// task:pr command flags -var ( - taskPRTitle string - taskPRDraft bool - taskPRLabels string - taskPRBase string -) - -var taskCommitCmd = &cli.Command{ - Use: "task:commit [task-id]", - Short: i18n.T("cmd.ai.task_commit.short"), - Long: i18n.T("cmd.ai.task_commit.long"), - Args: cli.ExactArgs(1), - RunE: func(cmd *cli.Command, args []string) error { - taskID := args[0] - - if taskCommitMessage == "" { - return cli.Err("commit message required") - } - - cfg, err := agentic.LoadConfig("") - if err != nil { - return cli.WrapVerb(err, "load", "config") - } - - client := agentic.NewClientFromConfig(cfg) - - ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) - defer cancel() - - // Get task details - task, err := client.GetTask(ctx, taskID) - if err != nil { - return cli.WrapVerb(err, "get", "task") - } - - // Build commit message with optional scope - commitType := inferCommitType(task.Labels) - var fullMessage string - if taskCommitScope != "" { - fullMessage = cli.Sprintf("%s(%s): %s", commitType, taskCommitScope, taskCommitMessage) - } else { - fullMessage = cli.Sprintf("%s: %s", commitType, taskCommitMessage) - } - - // Get current directory - cwd, err := os.Getwd() - if err != nil { - return cli.WrapVerb(err, "get", "working directory") - } - - // Check for uncommitted changes - hasChanges, err := agentic.HasUncommittedChanges(ctx, cwd) - if err != nil { - return cli.WrapVerb(err, "check", "git status") - } - - if !hasChanges { - cli.Println("No changes to commit") - return nil - } - - // Create commit - cli.Print("%s %s\n", dimStyle.Render(">>"), i18n.ProgressSubject("create", "commit for "+taskID)) - if err := 
agentic.AutoCommit(ctx, task, cwd, fullMessage); err != nil { - return cli.WrapAction(err, "commit") - } - - cli.Print("%s %s %s\n", successStyle.Render(">>"), i18n.T("i18n.done.commit")+":", fullMessage) - - // Push if requested - if taskCommitPush { - cli.Print("%s %s\n", dimStyle.Render(">>"), i18n.Progress("push")) - if err := agentic.PushChanges(ctx, cwd); err != nil { - return cli.WrapAction(err, "push") - } - cli.Print("%s %s\n", successStyle.Render(">>"), i18n.T("i18n.done.push", "changes")) - } - - return nil - }, -} - -var taskPRCmd = &cli.Command{ - Use: "task:pr [task-id]", - Short: i18n.T("cmd.ai.task_pr.short"), - Long: i18n.T("cmd.ai.task_pr.long"), - Args: cli.ExactArgs(1), - RunE: func(cmd *cli.Command, args []string) error { - taskID := args[0] - - cfg, err := agentic.LoadConfig("") - if err != nil { - return cli.WrapVerb(err, "load", "config") - } - - client := agentic.NewClientFromConfig(cfg) - - ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) - defer cancel() - - // Get task details - task, err := client.GetTask(ctx, taskID) - if err != nil { - return cli.WrapVerb(err, "get", "task") - } - - // Get current directory - cwd, err := os.Getwd() - if err != nil { - return cli.WrapVerb(err, "get", "working directory") - } - - // Check current branch - branch, err := agentic.GetCurrentBranch(ctx, cwd) - if err != nil { - return cli.WrapVerb(err, "get", "branch") - } - - if branch == "main" || branch == "master" { - return cli.Err("cannot create PR from %s branch", branch) - } - - // Push current branch - cli.Print("%s %s\n", dimStyle.Render(">>"), i18n.ProgressSubject("push", branch)) - if err := agentic.PushChanges(ctx, cwd); err != nil { - // Try setting upstream - if _, err := runGitCommand(cwd, "push", "-u", "origin", branch); err != nil { - return cli.WrapVerb(err, "push", "branch") - } - } - - // Build PR options - opts := agentic.PROptions{ - Title: taskPRTitle, - Draft: taskPRDraft, - Base: taskPRBase, - } - - if 
taskPRLabels != "" { - opts.Labels = strings.Split(taskPRLabels, ",") - } - - // Create PR - cli.Print("%s %s\n", dimStyle.Render(">>"), i18n.ProgressSubject("create", "PR")) - prURL, err := agentic.CreatePR(ctx, task, cwd, opts) - if err != nil { - return cli.WrapVerb(err, "create", "PR") - } - - cli.Print("%s %s\n", successStyle.Render(">>"), i18n.T("i18n.done.create", "PR")) - cli.Print(" %s %s\n", i18n.Label("url"), prURL) - - return nil - }, -} - -func initGitFlags() { - // task:commit command flags - taskCommitCmd.Flags().StringVarP(&taskCommitMessage, "message", "m", "", i18n.T("cmd.ai.task_commit.flag.message")) - taskCommitCmd.Flags().StringVar(&taskCommitScope, "scope", "", i18n.T("cmd.ai.task_commit.flag.scope")) - taskCommitCmd.Flags().BoolVar(&taskCommitPush, "push", false, i18n.T("cmd.ai.task_commit.flag.push")) - - // task:pr command flags - taskPRCmd.Flags().StringVar(&taskPRTitle, "title", "", i18n.T("cmd.ai.task_pr.flag.title")) - taskPRCmd.Flags().BoolVar(&taskPRDraft, "draft", false, i18n.T("cmd.ai.task_pr.flag.draft")) - taskPRCmd.Flags().StringVar(&taskPRLabels, "labels", "", i18n.T("cmd.ai.task_pr.flag.labels")) - taskPRCmd.Flags().StringVar(&taskPRBase, "base", "", i18n.T("cmd.ai.task_pr.flag.base")) -} - -func addTaskCommitCommand(parent *cli.Command) { - initGitFlags() - parent.AddCommand(taskCommitCmd) -} - -func addTaskPRCommand(parent *cli.Command) { - parent.AddCommand(taskPRCmd) -} - -// inferCommitType infers the commit type from task labels. 
-func inferCommitType(labels []string) string { - for _, label := range labels { - switch strings.ToLower(label) { - case "bug", "bugfix", "fix": - return "fix" - case "docs", "documentation": - return "docs" - case "refactor", "refactoring": - return "refactor" - case "test", "tests", "testing": - return "test" - case "chore": - return "chore" - case "style": - return "style" - case "perf", "performance": - return "perf" - case "ci": - return "ci" - case "build": - return "build" - } - } - return "feat" -} - -// runGitCommand runs a git command in the specified directory. -func runGitCommand(dir string, args ...string) (string, error) { - cmd := exec.Command("git", args...) - cmd.Dir = dir - - var stdout, stderr bytes.Buffer - cmd.Stdout = &stdout - cmd.Stderr = &stderr - - if err := cmd.Run(); err != nil { - if stderr.Len() > 0 { - return "", cli.Wrap(err, stderr.String()) - } - return "", err - } - - return stdout.String(), nil -} diff --git a/pkg/ai/cmd_tasks.go b/pkg/ai/cmd_tasks.go deleted file mode 100644 index db82111..0000000 --- a/pkg/ai/cmd_tasks.go +++ /dev/null @@ -1,289 +0,0 @@ -// cmd_tasks.go implements task listing and viewing commands. 
- -package ai - -import ( - "context" - "os" - "sort" - "strings" - "time" - - "github.com/host-uk/core/pkg/agentic" - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -// tasks command flags -var ( - tasksStatus string - tasksPriority string - tasksLabels string - tasksLimit int - tasksProject string -) - -// task command flags -var ( - taskAutoSelect bool - taskClaim bool - taskShowContext bool -) - -var tasksCmd = &cli.Command{ - Use: "tasks", - Short: i18n.T("cmd.ai.tasks.short"), - Long: i18n.T("cmd.ai.tasks.long"), - RunE: func(cmd *cli.Command, args []string) error { - limit := tasksLimit - if limit == 0 { - limit = 20 - } - - cfg, err := agentic.LoadConfig("") - if err != nil { - return cli.WrapVerb(err, "load", "config") - } - - client := agentic.NewClientFromConfig(cfg) - - opts := agentic.ListOptions{ - Limit: limit, - Project: tasksProject, - } - - if tasksStatus != "" { - opts.Status = agentic.TaskStatus(tasksStatus) - } - if tasksPriority != "" { - opts.Priority = agentic.TaskPriority(tasksPriority) - } - if tasksLabels != "" { - opts.Labels = strings.Split(tasksLabels, ",") - } - - ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) - defer cancel() - - tasks, err := client.ListTasks(ctx, opts) - if err != nil { - return cli.WrapVerb(err, "list", "tasks") - } - - if len(tasks) == 0 { - cli.Text(i18n.T("cmd.ai.tasks.none_found")) - return nil - } - - printTaskList(tasks) - return nil - }, -} - -var taskCmd = &cli.Command{ - Use: "task [task-id]", - Short: i18n.T("cmd.ai.task.short"), - Long: i18n.T("cmd.ai.task.long"), - RunE: func(cmd *cli.Command, args []string) error { - cfg, err := agentic.LoadConfig("") - if err != nil { - return cli.WrapVerb(err, "load", "config") - } - - client := agentic.NewClientFromConfig(cfg) - - ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) - defer cancel() - - var task *agentic.Task - - // Get the task ID from args - var taskID string - if 
len(args) > 0 { - taskID = args[0] - } - - if taskAutoSelect { - // Auto-select: find highest priority pending task - tasks, err := client.ListTasks(ctx, agentic.ListOptions{ - Status: agentic.StatusPending, - Limit: 50, - }) - if err != nil { - return cli.WrapVerb(err, "list", "tasks") - } - - if len(tasks) == 0 { - cli.Text(i18n.T("cmd.ai.task.no_pending")) - return nil - } - - // Sort by priority (critical > high > medium > low) - priorityOrder := map[agentic.TaskPriority]int{ - agentic.PriorityCritical: 0, - agentic.PriorityHigh: 1, - agentic.PriorityMedium: 2, - agentic.PriorityLow: 3, - } - - sort.Slice(tasks, func(i, j int) bool { - return priorityOrder[tasks[i].Priority] < priorityOrder[tasks[j].Priority] - }) - - task = &tasks[0] - taskClaim = true // Auto-select implies claiming - } else { - if taskID == "" { - return cli.Err("%s", i18n.T("cmd.ai.task.id_required")) - } - - task, err = client.GetTask(ctx, taskID) - if err != nil { - return cli.WrapVerb(err, "get", "task") - } - } - - // Show context if requested - if taskShowContext { - cwd, _ := os.Getwd() - taskCtx, err := agentic.BuildTaskContext(task, cwd) - if err != nil { - cli.Print("%s %s: %s\n", errorStyle.Render(">>"), i18n.T("i18n.fail.build", "context"), err) - } else { - cli.Text(taskCtx.FormatContext()) - } - } else { - printTaskDetails(task) - } - - if taskClaim && task.Status == agentic.StatusPending { - cli.Blank() - cli.Print("%s %s\n", dimStyle.Render(">>"), i18n.T("cmd.ai.task.claiming")) - - claimedTask, err := client.ClaimTask(ctx, task.ID) - if err != nil { - return cli.WrapVerb(err, "claim", "task") - } - - cli.Print("%s %s\n", successStyle.Render(">>"), i18n.T("i18n.done.claim", "task")) - cli.Print(" %s %s\n", i18n.Label("status"), formatTaskStatus(claimedTask.Status)) - } - - return nil - }, -} - -func initTasksFlags() { - // tasks command flags - tasksCmd.Flags().StringVar(&tasksStatus, "status", "", i18n.T("cmd.ai.tasks.flag.status")) - 
tasksCmd.Flags().StringVar(&tasksPriority, "priority", "", i18n.T("cmd.ai.tasks.flag.priority")) - tasksCmd.Flags().StringVar(&tasksLabels, "labels", "", i18n.T("cmd.ai.tasks.flag.labels")) - tasksCmd.Flags().IntVar(&tasksLimit, "limit", 20, i18n.T("cmd.ai.tasks.flag.limit")) - tasksCmd.Flags().StringVar(&tasksProject, "project", "", i18n.T("cmd.ai.tasks.flag.project")) - - // task command flags - taskCmd.Flags().BoolVar(&taskAutoSelect, "auto", false, i18n.T("cmd.ai.task.flag.auto")) - taskCmd.Flags().BoolVar(&taskClaim, "claim", false, i18n.T("cmd.ai.task.flag.claim")) - taskCmd.Flags().BoolVar(&taskShowContext, "context", false, i18n.T("cmd.ai.task.flag.context")) -} - -func addTasksCommand(parent *cli.Command) { - initTasksFlags() - parent.AddCommand(tasksCmd) -} - -func addTaskCommand(parent *cli.Command) { - parent.AddCommand(taskCmd) -} - -func printTaskList(tasks []agentic.Task) { - cli.Print("\n%s\n\n", i18n.T("cmd.ai.tasks.found", map[string]interface{}{"Count": len(tasks)})) - - for _, task := range tasks { - id := taskIDStyle.Render(task.ID) - title := taskTitleStyle.Render(truncate(task.Title, 50)) - priority := formatTaskPriority(task.Priority) - status := formatTaskStatus(task.Status) - - line := cli.Sprintf(" %s %s %s %s", id, priority, status, title) - - if len(task.Labels) > 0 { - labels := taskLabelStyle.Render("[" + strings.Join(task.Labels, ", ") + "]") - line += " " + labels - } - - cli.Text(line) - } - - cli.Blank() - cli.Print("%s\n", dimStyle.Render(i18n.T("cmd.ai.tasks.hint"))) -} - -func printTaskDetails(task *agentic.Task) { - cli.Blank() - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.ai.label.id")), taskIDStyle.Render(task.ID)) - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.ai.label.title")), taskTitleStyle.Render(task.Title)) - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.ai.label.priority")), formatTaskPriority(task.Priority)) - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("status")), formatTaskStatus(task.Status)) - - 
if task.Project != "" { - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("project")), task.Project) - } - - if len(task.Labels) > 0 { - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.ai.label.labels")), taskLabelStyle.Render(strings.Join(task.Labels, ", "))) - } - - if task.ClaimedBy != "" { - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.ai.label.claimed_by")), task.ClaimedBy) - } - - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.ai.label.created")), formatAge(task.CreatedAt)) - - cli.Blank() - cli.Print("%s\n", dimStyle.Render(i18n.T("cmd.ai.label.description"))) - cli.Text(task.Description) - - if len(task.Files) > 0 { - cli.Blank() - cli.Print("%s\n", dimStyle.Render(i18n.T("cmd.ai.label.related_files"))) - for _, f := range task.Files { - cli.Print(" - %s\n", f) - } - } - - if len(task.Dependencies) > 0 { - cli.Blank() - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.ai.label.blocked_by")), strings.Join(task.Dependencies, ", ")) - } -} - -func formatTaskPriority(p agentic.TaskPriority) string { - switch p { - case agentic.PriorityCritical: - return taskPriorityHighStyle.Render("[" + i18n.T("cmd.ai.priority.critical") + "]") - case agentic.PriorityHigh: - return taskPriorityHighStyle.Render("[" + i18n.T("cmd.ai.priority.high") + "]") - case agentic.PriorityMedium: - return taskPriorityMediumStyle.Render("[" + i18n.T("cmd.ai.priority.medium") + "]") - case agentic.PriorityLow: - return taskPriorityLowStyle.Render("[" + i18n.T("cmd.ai.priority.low") + "]") - default: - return dimStyle.Render("[" + string(p) + "]") - } -} - -func formatTaskStatus(s agentic.TaskStatus) string { - switch s { - case agentic.StatusPending: - return taskStatusPendingStyle.Render(i18n.T("cmd.ai.status.pending")) - case agentic.StatusInProgress: - return taskStatusInProgressStyle.Render(i18n.T("cmd.ai.status.in_progress")) - case agentic.StatusCompleted: - return taskStatusCompletedStyle.Render(i18n.T("cmd.ai.status.completed")) - case agentic.StatusBlocked: - return 
taskStatusBlockedStyle.Render(i18n.T("cmd.ai.status.blocked")) - default: - return dimStyle.Render(string(s)) - } -} \ No newline at end of file diff --git a/pkg/ai/cmd_updates.go b/pkg/ai/cmd_updates.go deleted file mode 100644 index 91fd7ad..0000000 --- a/pkg/ai/cmd_updates.go +++ /dev/null @@ -1,123 +0,0 @@ -// cmd_updates.go implements task update and completion commands. - -package ai - -import ( - "context" - "time" - - "github.com/host-uk/core/pkg/agentic" - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -// task:update command flags -var ( - taskUpdateStatus string - taskUpdateProgress int - taskUpdateNotes string -) - -// task:complete command flags -var ( - taskCompleteOutput string - taskCompleteFailed bool - taskCompleteErrorMsg string -) - -var taskUpdateCmd = &cli.Command{ - Use: "task:update [task-id]", - Short: i18n.T("cmd.ai.task_update.short"), - Long: i18n.T("cmd.ai.task_update.long"), - Args: cli.ExactArgs(1), - RunE: func(cmd *cli.Command, args []string) error { - taskID := args[0] - - if taskUpdateStatus == "" && taskUpdateProgress == 0 && taskUpdateNotes == "" { - return cli.Err("%s", i18n.T("cmd.ai.task_update.flag_required")) - } - - cfg, err := agentic.LoadConfig("") - if err != nil { - return cli.WrapVerb(err, "load", "config") - } - - client := agentic.NewClientFromConfig(cfg) - - ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) - defer cancel() - - update := agentic.TaskUpdate{ - Progress: taskUpdateProgress, - Notes: taskUpdateNotes, - } - if taskUpdateStatus != "" { - update.Status = agentic.TaskStatus(taskUpdateStatus) - } - - if err := client.UpdateTask(ctx, taskID, update); err != nil { - return cli.WrapVerb(err, "update", "task") - } - - cli.Print("%s %s\n", successStyle.Render(">>"), i18n.T("i18n.done.update", "task")) - return nil - }, -} - -var taskCompleteCmd = &cli.Command{ - Use: "task:complete [task-id]", - Short: i18n.T("cmd.ai.task_complete.short"), - Long: 
i18n.T("cmd.ai.task_complete.long"), - Args: cli.ExactArgs(1), - RunE: func(cmd *cli.Command, args []string) error { - taskID := args[0] - - cfg, err := agentic.LoadConfig("") - if err != nil { - return cli.WrapVerb(err, "load", "config") - } - - client := agentic.NewClientFromConfig(cfg) - - ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) - defer cancel() - - result := agentic.TaskResult{ - Success: !taskCompleteFailed, - Output: taskCompleteOutput, - ErrorMessage: taskCompleteErrorMsg, - } - - if err := client.CompleteTask(ctx, taskID, result); err != nil { - return cli.WrapVerb(err, "complete", "task") - } - - if taskCompleteFailed { - cli.Print("%s %s\n", errorStyle.Render(">>"), i18n.T("cmd.ai.task_complete.failed", map[string]interface{}{"ID": taskID})) - } else { - cli.Print("%s %s\n", successStyle.Render(">>"), i18n.T("i18n.done.complete", "task")) - } - return nil - }, -} - -func initUpdatesFlags() { - // task:update command flags - taskUpdateCmd.Flags().StringVar(&taskUpdateStatus, "status", "", i18n.T("cmd.ai.task_update.flag.status")) - taskUpdateCmd.Flags().IntVar(&taskUpdateProgress, "progress", 0, i18n.T("cmd.ai.task_update.flag.progress")) - taskUpdateCmd.Flags().StringVar(&taskUpdateNotes, "notes", "", i18n.T("cmd.ai.task_update.flag.notes")) - - // task:complete command flags - taskCompleteCmd.Flags().StringVar(&taskCompleteOutput, "output", "", i18n.T("cmd.ai.task_complete.flag.output")) - taskCompleteCmd.Flags().BoolVar(&taskCompleteFailed, "failed", false, i18n.T("cmd.ai.task_complete.flag.failed")) - taskCompleteCmd.Flags().StringVar(&taskCompleteErrorMsg, "error", "", i18n.T("cmd.ai.task_complete.flag.error")) -} - -func addTaskUpdateCommand(parent *cli.Command) { - initUpdatesFlags() - parent.AddCommand(taskUpdateCmd) -} - -func addTaskCompleteCommand(parent *cli.Command) { - parent.AddCommand(taskCompleteCmd) -} diff --git a/pkg/build/archive.go b/pkg/build/archive.go deleted file mode 100644 index b0451f2..0000000 
--- a/pkg/build/archive.go +++ /dev/null @@ -1,191 +0,0 @@ -// Package build provides project type detection and cross-compilation for the Core build system. -package build - -import ( - "archive/tar" - "archive/zip" - "compress/gzip" - "fmt" - "io" - "os" - "path/filepath" - "strings" -) - -// Archive creates an archive for a single artifact. -// Uses tar.gz for linux/darwin and zip for windows. -// The archive is created alongside the binary (e.g., dist/myapp_linux_amd64.tar.gz). -// Returns a new Artifact with Path pointing to the archive. -func Archive(artifact Artifact) (Artifact, error) { - if artifact.Path == "" { - return Artifact{}, fmt.Errorf("build.Archive: artifact path is empty") - } - - // Verify the source file exists - info, err := os.Stat(artifact.Path) - if err != nil { - return Artifact{}, fmt.Errorf("build.Archive: source file not found: %w", err) - } - if info.IsDir() { - return Artifact{}, fmt.Errorf("build.Archive: source path is a directory, expected file") - } - - // Determine archive type based on OS - var archivePath string - var archiveFunc func(src, dst string) error - - if artifact.OS == "windows" { - archivePath = archiveFilename(artifact, ".zip") - archiveFunc = createZipArchive - } else { - archivePath = archiveFilename(artifact, ".tar.gz") - archiveFunc = createTarGzArchive - } - - // Create the archive - if err := archiveFunc(artifact.Path, archivePath); err != nil { - return Artifact{}, fmt.Errorf("build.Archive: failed to create archive: %w", err) - } - - return Artifact{ - Path: archivePath, - OS: artifact.OS, - Arch: artifact.Arch, - Checksum: artifact.Checksum, - }, nil -} - -// ArchiveAll archives all artifacts. -// Returns a slice of new artifacts pointing to the archives. 
-func ArchiveAll(artifacts []Artifact) ([]Artifact, error) { - if len(artifacts) == 0 { - return nil, nil - } - - var archived []Artifact - for _, artifact := range artifacts { - arch, err := Archive(artifact) - if err != nil { - return archived, fmt.Errorf("build.ArchiveAll: failed to archive %s: %w", artifact.Path, err) - } - archived = append(archived, arch) - } - - return archived, nil -} - -// archiveFilename generates the archive filename based on the artifact and extension. -// Format: dist/myapp_linux_amd64.tar.gz (binary name taken from artifact path). -func archiveFilename(artifact Artifact, ext string) string { - // Get the directory containing the binary (e.g., dist/linux_amd64) - dir := filepath.Dir(artifact.Path) - // Go up one level to the output directory (e.g., dist) - outputDir := filepath.Dir(dir) - - // Get the binary name without extension - binaryName := filepath.Base(artifact.Path) - binaryName = strings.TrimSuffix(binaryName, ".exe") - - // Construct archive name: myapp_linux_amd64.tar.gz - archiveName := fmt.Sprintf("%s_%s_%s%s", binaryName, artifact.OS, artifact.Arch, ext) - - return filepath.Join(outputDir, archiveName) -} - -// createTarGzArchive creates a tar.gz archive containing a single file. 
-func createTarGzArchive(src, dst string) error { - // Open the source file - srcFile, err := os.Open(src) - if err != nil { - return fmt.Errorf("failed to open source file: %w", err) - } - defer srcFile.Close() - - srcInfo, err := srcFile.Stat() - if err != nil { - return fmt.Errorf("failed to stat source file: %w", err) - } - - // Create the destination file - dstFile, err := os.Create(dst) - if err != nil { - return fmt.Errorf("failed to create archive file: %w", err) - } - defer dstFile.Close() - - // Create gzip writer - gzWriter := gzip.NewWriter(dstFile) - defer gzWriter.Close() - - // Create tar writer - tarWriter := tar.NewWriter(gzWriter) - defer tarWriter.Close() - - // Create tar header - header, err := tar.FileInfoHeader(srcInfo, "") - if err != nil { - return fmt.Errorf("failed to create tar header: %w", err) - } - // Use just the filename, not the full path - header.Name = filepath.Base(src) - - // Write header - if err := tarWriter.WriteHeader(header); err != nil { - return fmt.Errorf("failed to write tar header: %w", err) - } - - // Write file content - if _, err := io.Copy(tarWriter, srcFile); err != nil { - return fmt.Errorf("failed to write file content to tar: %w", err) - } - - return nil -} - -// createZipArchive creates a zip archive containing a single file. 
-func createZipArchive(src, dst string) error { - // Open the source file - srcFile, err := os.Open(src) - if err != nil { - return fmt.Errorf("failed to open source file: %w", err) - } - defer srcFile.Close() - - srcInfo, err := srcFile.Stat() - if err != nil { - return fmt.Errorf("failed to stat source file: %w", err) - } - - // Create the destination file - dstFile, err := os.Create(dst) - if err != nil { - return fmt.Errorf("failed to create archive file: %w", err) - } - defer dstFile.Close() - - // Create zip writer - zipWriter := zip.NewWriter(dstFile) - defer zipWriter.Close() - - // Create zip header - header, err := zip.FileInfoHeader(srcInfo) - if err != nil { - return fmt.Errorf("failed to create zip header: %w", err) - } - // Use just the filename, not the full path - header.Name = filepath.Base(src) - header.Method = zip.Deflate - - // Create file in archive - writer, err := zipWriter.CreateHeader(header) - if err != nil { - return fmt.Errorf("failed to create zip entry: %w", err) - } - - // Write file content - if _, err := io.Copy(writer, srcFile); err != nil { - return fmt.Errorf("failed to write file content to zip: %w", err) - } - - return nil -} diff --git a/pkg/build/archive_test.go b/pkg/build/archive_test.go deleted file mode 100644 index 27d6660..0000000 --- a/pkg/build/archive_test.go +++ /dev/null @@ -1,308 +0,0 @@ -package build - -import ( - "archive/tar" - "archive/zip" - "compress/gzip" - "io" - "os" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// setupArchiveTestFile creates a test binary file in a temp directory with the standard structure. -// Returns the path to the binary and the output directory. 
-func setupArchiveTestFile(t *testing.T, name, os_, arch string) (binaryPath string, outputDir string) { - t.Helper() - - outputDir = t.TempDir() - - // Create platform directory: dist/os_arch - platformDir := filepath.Join(outputDir, os_+"_"+arch) - err := os.MkdirAll(platformDir, 0755) - require.NoError(t, err) - - // Create test binary - binaryPath = filepath.Join(platformDir, name) - content := []byte("#!/bin/bash\necho 'Hello, World!'\n") - err = os.WriteFile(binaryPath, content, 0755) - require.NoError(t, err) - - return binaryPath, outputDir -} - -func TestArchive_Good(t *testing.T) { - t.Run("creates tar.gz for linux", func(t *testing.T) { - binaryPath, outputDir := setupArchiveTestFile(t, "myapp", "linux", "amd64") - - artifact := Artifact{ - Path: binaryPath, - OS: "linux", - Arch: "amd64", - } - - result, err := Archive(artifact) - require.NoError(t, err) - - // Verify archive was created - expectedPath := filepath.Join(outputDir, "myapp_linux_amd64.tar.gz") - assert.Equal(t, expectedPath, result.Path) - assert.FileExists(t, result.Path) - - // Verify OS and Arch are preserved - assert.Equal(t, "linux", result.OS) - assert.Equal(t, "amd64", result.Arch) - - // Verify archive content - verifyTarGzContent(t, result.Path, "myapp") - }) - - t.Run("creates tar.gz for darwin", func(t *testing.T) { - binaryPath, outputDir := setupArchiveTestFile(t, "myapp", "darwin", "arm64") - - artifact := Artifact{ - Path: binaryPath, - OS: "darwin", - Arch: "arm64", - } - - result, err := Archive(artifact) - require.NoError(t, err) - - expectedPath := filepath.Join(outputDir, "myapp_darwin_arm64.tar.gz") - assert.Equal(t, expectedPath, result.Path) - assert.FileExists(t, result.Path) - - verifyTarGzContent(t, result.Path, "myapp") - }) - - t.Run("creates zip for windows", func(t *testing.T) { - binaryPath, outputDir := setupArchiveTestFile(t, "myapp.exe", "windows", "amd64") - - artifact := Artifact{ - Path: binaryPath, - OS: "windows", - Arch: "amd64", - } - - result, err 
:= Archive(artifact) - require.NoError(t, err) - - // Windows archives should strip .exe from archive name - expectedPath := filepath.Join(outputDir, "myapp_windows_amd64.zip") - assert.Equal(t, expectedPath, result.Path) - assert.FileExists(t, result.Path) - - verifyZipContent(t, result.Path, "myapp.exe") - }) - - t.Run("preserves checksum field", func(t *testing.T) { - binaryPath, _ := setupArchiveTestFile(t, "myapp", "linux", "amd64") - - artifact := Artifact{ - Path: binaryPath, - OS: "linux", - Arch: "amd64", - Checksum: "abc123", - } - - result, err := Archive(artifact) - require.NoError(t, err) - assert.Equal(t, "abc123", result.Checksum) - }) -} - -func TestArchive_Bad(t *testing.T) { - t.Run("returns error for empty path", func(t *testing.T) { - artifact := Artifact{ - Path: "", - OS: "linux", - Arch: "amd64", - } - - result, err := Archive(artifact) - assert.Error(t, err) - assert.Contains(t, err.Error(), "artifact path is empty") - assert.Empty(t, result.Path) - }) - - t.Run("returns error for non-existent file", func(t *testing.T) { - artifact := Artifact{ - Path: "/nonexistent/path/binary", - OS: "linux", - Arch: "amd64", - } - - result, err := Archive(artifact) - assert.Error(t, err) - assert.Contains(t, err.Error(), "source file not found") - assert.Empty(t, result.Path) - }) - - t.Run("returns error for directory path", func(t *testing.T) { - dir := t.TempDir() - - artifact := Artifact{ - Path: dir, - OS: "linux", - Arch: "amd64", - } - - result, err := Archive(artifact) - assert.Error(t, err) - assert.Contains(t, err.Error(), "source path is a directory") - assert.Empty(t, result.Path) - }) -} - -func TestArchiveAll_Good(t *testing.T) { - t.Run("archives multiple artifacts", func(t *testing.T) { - outputDir := t.TempDir() - - // Create multiple binaries - var artifacts []Artifact - targets := []struct { - os_ string - arch string - }{ - {"linux", "amd64"}, - {"linux", "arm64"}, - {"darwin", "arm64"}, - {"windows", "amd64"}, - } - - for _, target := 
range targets { - platformDir := filepath.Join(outputDir, target.os_+"_"+target.arch) - err := os.MkdirAll(platformDir, 0755) - require.NoError(t, err) - - name := "myapp" - if target.os_ == "windows" { - name = "myapp.exe" - } - - binaryPath := filepath.Join(platformDir, name) - err = os.WriteFile(binaryPath, []byte("binary content"), 0755) - require.NoError(t, err) - - artifacts = append(artifacts, Artifact{ - Path: binaryPath, - OS: target.os_, - Arch: target.arch, - }) - } - - results, err := ArchiveAll(artifacts) - require.NoError(t, err) - require.Len(t, results, 4) - - // Verify all archives were created - for i, result := range results { - assert.FileExists(t, result.Path) - assert.Equal(t, artifacts[i].OS, result.OS) - assert.Equal(t, artifacts[i].Arch, result.Arch) - } - }) - - t.Run("returns nil for empty slice", func(t *testing.T) { - results, err := ArchiveAll([]Artifact{}) - assert.NoError(t, err) - assert.Nil(t, results) - }) - - t.Run("returns nil for nil slice", func(t *testing.T) { - results, err := ArchiveAll(nil) - assert.NoError(t, err) - assert.Nil(t, results) - }) -} - -func TestArchiveAll_Bad(t *testing.T) { - t.Run("returns partial results on error", func(t *testing.T) { - binaryPath, _ := setupArchiveTestFile(t, "myapp", "linux", "amd64") - - artifacts := []Artifact{ - {Path: binaryPath, OS: "linux", Arch: "amd64"}, - {Path: "/nonexistent/binary", OS: "linux", Arch: "arm64"}, // This will fail - } - - results, err := ArchiveAll(artifacts) - assert.Error(t, err) - // Should have the first successful result - assert.Len(t, results, 1) - assert.FileExists(t, results[0].Path) - }) -} - -func TestArchiveFilename_Good(t *testing.T) { - t.Run("generates correct tar.gz filename", func(t *testing.T) { - artifact := Artifact{ - Path: "/output/linux_amd64/myapp", - OS: "linux", - Arch: "amd64", - } - - filename := archiveFilename(artifact, ".tar.gz") - assert.Equal(t, "/output/myapp_linux_amd64.tar.gz", filename) - }) - - t.Run("generates correct zip 
filename", func(t *testing.T) { - artifact := Artifact{ - Path: "/output/windows_amd64/myapp.exe", - OS: "windows", - Arch: "amd64", - } - - filename := archiveFilename(artifact, ".zip") - assert.Equal(t, "/output/myapp_windows_amd64.zip", filename) - }) - - t.Run("handles nested output directories", func(t *testing.T) { - artifact := Artifact{ - Path: "/project/dist/linux_arm64/cli", - OS: "linux", - Arch: "arm64", - } - - filename := archiveFilename(artifact, ".tar.gz") - assert.Equal(t, "/project/dist/cli_linux_arm64.tar.gz", filename) - }) -} - -// verifyTarGzContent opens a tar.gz file and verifies it contains the expected file. -func verifyTarGzContent(t *testing.T, archivePath, expectedName string) { - t.Helper() - - file, err := os.Open(archivePath) - require.NoError(t, err) - defer file.Close() - - gzReader, err := gzip.NewReader(file) - require.NoError(t, err) - defer gzReader.Close() - - tarReader := tar.NewReader(gzReader) - - header, err := tarReader.Next() - require.NoError(t, err) - assert.Equal(t, expectedName, header.Name) - - // Verify there's only one file - _, err = tarReader.Next() - assert.Equal(t, io.EOF, err) -} - -// verifyZipContent opens a zip file and verifies it contains the expected file. -func verifyZipContent(t *testing.T, archivePath, expectedName string) { - t.Helper() - - reader, err := zip.OpenReader(archivePath) - require.NoError(t, err) - defer reader.Close() - - require.Len(t, reader.File, 1) - assert.Equal(t, expectedName, reader.File[0].Name) -} diff --git a/pkg/build/build.go b/pkg/build/build.go deleted file mode 100644 index 947d589..0000000 --- a/pkg/build/build.go +++ /dev/null @@ -1,76 +0,0 @@ -// Package build provides project type detection and cross-compilation for the Core build system. -// It supports Go, Wails, Node.js, and PHP projects with automatic detection based on -// marker files (go.mod, wails.json, package.json, composer.json). 
-package build - -import ( - "context" -) - -// ProjectType represents a detected project type. -type ProjectType string - -const ( - ProjectTypeGo ProjectType = "go" - ProjectTypeWails ProjectType = "wails" - ProjectTypeNode ProjectType = "node" - ProjectTypePHP ProjectType = "php" - ProjectTypeDocker ProjectType = "docker" - ProjectTypeLinuxKit ProjectType = "linuxkit" - ProjectTypeTaskfile ProjectType = "taskfile" -) - -// Target represents a build target platform. -type Target struct { - OS string - Arch string -} - -// String returns the target in GOOS/GOARCH format. -func (t Target) String() string { - return t.OS + "/" + t.Arch -} - -// Artifact represents a build output file. -type Artifact struct { - Path string - OS string - Arch string - Checksum string -} - -// Config holds build configuration. -type Config struct { - // ProjectDir is the root directory of the project. - ProjectDir string - // OutputDir is where build artifacts are placed. - OutputDir string - // Name is the output binary name. - Name string - // Version is the build version string. - Version string - // LDFlags are additional linker flags. - LDFlags []string - - // Docker-specific config - Dockerfile string // Path to Dockerfile (default: Dockerfile) - Registry string // Container registry (default: ghcr.io) - Image string // Image name (owner/repo format) - Tags []string // Additional tags to apply - BuildArgs map[string]string // Docker build arguments - Push bool // Whether to push after build - - // LinuxKit-specific config - LinuxKitConfig string // Path to LinuxKit YAML config - Formats []string // Output formats (iso, qcow2, raw, vmdk) -} - -// Builder defines the interface for project-specific build implementations. -type Builder interface { - // Name returns the builder's identifier. - Name() string - // Detect checks if this builder can handle the project in the given directory. - Detect(dir string) (bool, error) - // Build compiles the project for the specified targets. 
- Build(ctx context.Context, cfg *Config, targets []Target) ([]Artifact, error) -} diff --git a/pkg/build/buildcmd/cmd_build.go b/pkg/build/buildcmd/cmd_build.go deleted file mode 100644 index 50f774c..0000000 --- a/pkg/build/buildcmd/cmd_build.go +++ /dev/null @@ -1,141 +0,0 @@ -// Package buildcmd provides project build commands with auto-detection. -package buildcmd - -import ( - "embed" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/spf13/cobra" -) - -func init() { - cli.RegisterCommands(AddBuildCommands) -} - -// Style aliases from shared package -var ( - buildHeaderStyle = cli.TitleStyle - buildTargetStyle = cli.ValueStyle - buildSuccessStyle = cli.SuccessStyle - buildErrorStyle = cli.ErrorStyle - buildDimStyle = cli.DimStyle -) - -//go:embed all:tmpl/gui -var guiTemplate embed.FS - -// Flags for the main build command -var ( - buildType string - ciMode bool - targets string - outputDir string - doArchive bool - doChecksum bool - - // Docker/LinuxKit specific flags - configPath string - format string - push bool - imageName string - - // Signing flags - noSign bool - notarize bool - - // from-path subcommand - fromPath string - - // pwa subcommand - pwaURL string - - // sdk subcommand - sdkSpec string - sdkLang string - sdkVersion string - sdkDryRun bool -) - -var buildCmd = &cobra.Command{ - Use: "build", - Short: i18n.T("cmd.build.short"), - Long: i18n.T("cmd.build.long"), - RunE: func(cmd *cobra.Command, args []string) error { - return runProjectBuild(buildType, ciMode, targets, outputDir, doArchive, doChecksum, configPath, format, push, imageName, noSign, notarize) - }, -} - -var fromPathCmd = &cobra.Command{ - Use: "from-path", - Short: i18n.T("cmd.build.from_path.short"), - RunE: func(cmd *cobra.Command, args []string) error { - if fromPath == "" { - return errPathRequired - } - return runBuild(fromPath) - }, -} - -var pwaCmd = &cobra.Command{ - Use: "pwa", - Short: i18n.T("cmd.build.pwa.short"), - RunE: 
func(cmd *cobra.Command, args []string) error { - if pwaURL == "" { - return errURLRequired - } - return runPwaBuild(pwaURL) - }, -} - -var sdkBuildCmd = &cobra.Command{ - Use: "sdk", - Short: i18n.T("cmd.build.sdk.short"), - Long: i18n.T("cmd.build.sdk.long"), - RunE: func(cmd *cobra.Command, args []string) error { - return runBuildSDK(sdkSpec, sdkLang, sdkVersion, sdkDryRun) - }, -} - -func initBuildFlags() { - // Main build command flags - buildCmd.Flags().StringVar(&buildType, "type", "", i18n.T("cmd.build.flag.type")) - buildCmd.Flags().BoolVar(&ciMode, "ci", false, i18n.T("cmd.build.flag.ci")) - buildCmd.Flags().StringVar(&targets, "targets", "", i18n.T("cmd.build.flag.targets")) - buildCmd.Flags().StringVar(&outputDir, "output", "", i18n.T("cmd.build.flag.output")) - buildCmd.Flags().BoolVar(&doArchive, "archive", true, i18n.T("cmd.build.flag.archive")) - buildCmd.Flags().BoolVar(&doChecksum, "checksum", true, i18n.T("cmd.build.flag.checksum")) - - // Docker/LinuxKit specific - buildCmd.Flags().StringVar(&configPath, "config", "", i18n.T("cmd.build.flag.config")) - buildCmd.Flags().StringVar(&format, "format", "", i18n.T("cmd.build.flag.format")) - buildCmd.Flags().BoolVar(&push, "push", false, i18n.T("cmd.build.flag.push")) - buildCmd.Flags().StringVar(&imageName, "image", "", i18n.T("cmd.build.flag.image")) - - // Signing flags - buildCmd.Flags().BoolVar(&noSign, "no-sign", false, i18n.T("cmd.build.flag.no_sign")) - buildCmd.Flags().BoolVar(¬arize, "notarize", false, i18n.T("cmd.build.flag.notarize")) - - // from-path subcommand flags - fromPathCmd.Flags().StringVar(&fromPath, "path", "", i18n.T("cmd.build.from_path.flag.path")) - - // pwa subcommand flags - pwaCmd.Flags().StringVar(&pwaURL, "url", "", i18n.T("cmd.build.pwa.flag.url")) - - // sdk subcommand flags - sdkBuildCmd.Flags().StringVar(&sdkSpec, "spec", "", i18n.T("common.flag.spec")) - sdkBuildCmd.Flags().StringVar(&sdkLang, "lang", "", i18n.T("cmd.build.sdk.flag.lang")) - 
sdkBuildCmd.Flags().StringVar(&sdkVersion, "version", "", i18n.T("cmd.build.sdk.flag.version")) - sdkBuildCmd.Flags().BoolVar(&sdkDryRun, "dry-run", false, i18n.T("cmd.build.sdk.flag.dry_run")) - - // Add subcommands - buildCmd.AddCommand(fromPathCmd) - buildCmd.AddCommand(pwaCmd) - buildCmd.AddCommand(sdkBuildCmd) -} - -// AddBuildCommands registers the 'build' command and all subcommands. -func AddBuildCommands(root *cobra.Command) { - initBuildFlags() - root.AddCommand(buildCmd) -} diff --git a/pkg/build/buildcmd/cmd_commands.go b/pkg/build/buildcmd/cmd_commands.go deleted file mode 100644 index 310d558..0000000 --- a/pkg/build/buildcmd/cmd_commands.go +++ /dev/null @@ -1,21 +0,0 @@ -// Package buildcmd provides project build commands with auto-detection. -// -// Supports building: -// - Go projects (standard and cross-compilation) -// - Wails desktop applications -// - Docker images -// - LinuxKit VM images -// - Taskfile-based projects -// -// Configuration via .core/build.yaml or command-line flags. -// -// Subcommands: -// - build: Auto-detect and build the current project -// - build from-path: Build from a local static web app directory -// - build pwa: Build from a live PWA URL -// - build sdk: Generate API SDKs from OpenAPI spec -package buildcmd - -// Note: The AddBuildCommands function is defined in cmd_build.go -// This file exists for documentation purposes and maintains the original -// package documentation from commands.go. diff --git a/pkg/build/buildcmd/cmd_project.go b/pkg/build/buildcmd/cmd_project.go deleted file mode 100644 index 1b7109a..0000000 --- a/pkg/build/buildcmd/cmd_project.go +++ /dev/null @@ -1,369 +0,0 @@ -// cmd_project.go implements the main project build logic. -// -// This handles auto-detection of project types (Go, Wails, Docker, LinuxKit, Taskfile) -// and orchestrates the build process including signing, archiving, and checksums. 
- -package buildcmd - -import ( - "context" - "encoding/json" - "fmt" - "os" - "path/filepath" - "runtime" - "strings" - - "github.com/host-uk/core/pkg/build" - "github.com/host-uk/core/pkg/build/builders" - "github.com/host-uk/core/pkg/build/signing" - "github.com/host-uk/core/pkg/i18n" -) - -// runProjectBuild handles the main `core build` command with auto-detection. -func runProjectBuild(buildType string, ciMode bool, targetsFlag string, outputDir string, doArchive bool, doChecksum bool, configPath string, format string, push bool, imageName string, noSign bool, notarize bool) error { - // Get current working directory as project root - projectDir, err := os.Getwd() - if err != nil { - return fmt.Errorf("%s: %w", i18n.T("common.error.failed", map[string]any{"Action": "get working directory"}), err) - } - - // Load configuration from .core/build.yaml (or defaults) - buildCfg, err := build.LoadConfig(projectDir) - if err != nil { - return fmt.Errorf("%s: %w", i18n.T("common.error.failed", map[string]any{"Action": "load config"}), err) - } - - // Detect project type if not specified - var projectType build.ProjectType - if buildType != "" { - projectType = build.ProjectType(buildType) - } else { - projectType, err = build.PrimaryType(projectDir) - if err != nil { - return fmt.Errorf("%s: %w", i18n.T("common.error.failed", map[string]any{"Action": "detect project type"}), err) - } - if projectType == "" { - return fmt.Errorf("%s", i18n.T("cmd.build.error.no_project_type", map[string]interface{}{"Dir": projectDir})) - } - } - - // Determine targets - var buildTargets []build.Target - if targetsFlag != "" { - // Parse from command line - buildTargets, err = parseTargets(targetsFlag) - if err != nil { - return err - } - } else if len(buildCfg.Targets) > 0 { - // Use config targets - buildTargets = buildCfg.ToTargets() - } else { - // Fall back to current OS/arch - buildTargets = []build.Target{ - {OS: runtime.GOOS, Arch: runtime.GOARCH}, - } - } - - // Determine 
output directory - if outputDir == "" { - outputDir = "dist" - } - - // Determine binary name - binaryName := buildCfg.Project.Binary - if binaryName == "" { - binaryName = buildCfg.Project.Name - } - if binaryName == "" { - binaryName = filepath.Base(projectDir) - } - - // Print build info (unless CI mode) - if !ciMode { - fmt.Printf("%s %s\n", buildHeaderStyle.Render(i18n.T("cmd.build.label.build")), i18n.T("cmd.build.building_project")) - fmt.Printf(" %s %s\n", i18n.T("cmd.build.label.type"), buildTargetStyle.Render(string(projectType))) - fmt.Printf(" %s %s\n", i18n.T("cmd.build.label.output"), buildTargetStyle.Render(outputDir)) - fmt.Printf(" %s %s\n", i18n.T("cmd.build.label.binary"), buildTargetStyle.Render(binaryName)) - fmt.Printf(" %s %s\n", i18n.T("cmd.build.label.targets"), buildTargetStyle.Render(formatTargets(buildTargets))) - fmt.Println() - } - - // Get the appropriate builder - builder, err := getBuilder(projectType) - if err != nil { - return err - } - - // Create build config for the builder - cfg := &build.Config{ - ProjectDir: projectDir, - OutputDir: outputDir, - Name: binaryName, - Version: buildCfg.Project.Name, // Could be enhanced with git describe - LDFlags: buildCfg.Build.LDFlags, - // Docker/LinuxKit specific - Dockerfile: configPath, // Reuse for Dockerfile path - LinuxKitConfig: configPath, - Push: push, - Image: imageName, - } - - // Parse formats for LinuxKit - if format != "" { - cfg.Formats = strings.Split(format, ",") - } - - // Execute build - ctx := context.Background() - artifacts, err := builder.Build(ctx, cfg, buildTargets) - if err != nil { - if !ciMode { - fmt.Printf("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("common.error.failed", map[string]any{"Action": "build"}), err) - } - return err - } - - if !ciMode { - fmt.Printf("%s %s\n", buildSuccessStyle.Render(i18n.T("common.label.success")), i18n.T("cmd.build.built_artifacts", map[string]interface{}{"Count": len(artifacts)})) - fmt.Println() 
- for _, artifact := range artifacts { - relPath, err := filepath.Rel(projectDir, artifact.Path) - if err != nil { - relPath = artifact.Path - } - fmt.Printf(" %s %s %s\n", - buildSuccessStyle.Render("*"), - buildTargetStyle.Render(relPath), - buildDimStyle.Render(fmt.Sprintf("(%s/%s)", artifact.OS, artifact.Arch)), - ) - } - } - - // Sign macOS binaries if enabled - signCfg := buildCfg.Sign - if notarize { - signCfg.MacOS.Notarize = true - } - if noSign { - signCfg.Enabled = false - } - - if signCfg.Enabled && runtime.GOOS == "darwin" { - if !ciMode { - fmt.Println() - fmt.Printf("%s %s\n", buildHeaderStyle.Render(i18n.T("cmd.build.label.sign")), i18n.T("cmd.build.signing_binaries")) - } - - // Convert build.Artifact to signing.Artifact - signingArtifacts := make([]signing.Artifact, len(artifacts)) - for i, a := range artifacts { - signingArtifacts[i] = signing.Artifact{Path: a.Path, OS: a.OS, Arch: a.Arch} - } - - if err := signing.SignBinaries(ctx, signCfg, signingArtifacts); err != nil { - if !ciMode { - fmt.Printf("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("cmd.build.error.signing_failed"), err) - } - return err - } - - if signCfg.MacOS.Notarize { - if err := signing.NotarizeBinaries(ctx, signCfg, signingArtifacts); err != nil { - if !ciMode { - fmt.Printf("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("cmd.build.error.notarization_failed"), err) - } - return err - } - } - } - - // Archive artifacts if enabled - var archivedArtifacts []build.Artifact - if doArchive && len(artifacts) > 0 { - if !ciMode { - fmt.Println() - fmt.Printf("%s %s\n", buildHeaderStyle.Render(i18n.T("cmd.build.label.archive")), i18n.T("cmd.build.creating_archives")) - } - - archivedArtifacts, err = build.ArchiveAll(artifacts) - if err != nil { - if !ciMode { - fmt.Printf("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("cmd.build.error.archive_failed"), err) - } - return err - } - - if !ciMode { - 
for _, artifact := range archivedArtifacts { - relPath, err := filepath.Rel(projectDir, artifact.Path) - if err != nil { - relPath = artifact.Path - } - fmt.Printf(" %s %s %s\n", - buildSuccessStyle.Render("*"), - buildTargetStyle.Render(relPath), - buildDimStyle.Render(fmt.Sprintf("(%s/%s)", artifact.OS, artifact.Arch)), - ) - } - } - } - - // Compute checksums if enabled - var checksummedArtifacts []build.Artifact - if doChecksum && len(archivedArtifacts) > 0 { - checksummedArtifacts, err = computeAndWriteChecksums(ctx, projectDir, outputDir, archivedArtifacts, signCfg, ciMode) - if err != nil { - return err - } - } else if doChecksum && len(artifacts) > 0 && !doArchive { - // Checksum raw binaries if archiving is disabled - checksummedArtifacts, err = computeAndWriteChecksums(ctx, projectDir, outputDir, artifacts, signCfg, ciMode) - if err != nil { - return err - } - } - - // Output results for CI mode - if ciMode { - // Determine which artifacts to output (prefer checksummed > archived > raw) - var outputArtifacts []build.Artifact - if len(checksummedArtifacts) > 0 { - outputArtifacts = checksummedArtifacts - } else if len(archivedArtifacts) > 0 { - outputArtifacts = archivedArtifacts - } else { - outputArtifacts = artifacts - } - - // JSON output for CI - output, err := json.MarshalIndent(outputArtifacts, "", " ") - if err != nil { - return fmt.Errorf("%s: %w", i18n.T("common.error.failed", map[string]any{"Action": "marshal artifacts"}), err) - } - fmt.Println(string(output)) - } - - return nil -} - -// computeAndWriteChecksums computes checksums for artifacts and writes CHECKSUMS.txt. 
-func computeAndWriteChecksums(ctx context.Context, projectDir, outputDir string, artifacts []build.Artifact, signCfg signing.SignConfig, ciMode bool) ([]build.Artifact, error) { - if !ciMode { - fmt.Println() - fmt.Printf("%s %s\n", buildHeaderStyle.Render(i18n.T("cmd.build.label.checksum")), i18n.T("cmd.build.computing_checksums")) - } - - checksummedArtifacts, err := build.ChecksumAll(artifacts) - if err != nil { - if !ciMode { - fmt.Printf("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("cmd.build.error.checksum_failed"), err) - } - return nil, err - } - - // Write CHECKSUMS.txt - checksumPath := filepath.Join(outputDir, "CHECKSUMS.txt") - if err := build.WriteChecksumFile(checksummedArtifacts, checksumPath); err != nil { - if !ciMode { - fmt.Printf("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("common.error.failed", map[string]any{"Action": "write CHECKSUMS.txt"}), err) - } - return nil, err - } - - // Sign checksums with GPG - if signCfg.Enabled { - if err := signing.SignChecksums(ctx, signCfg, checksumPath); err != nil { - if !ciMode { - fmt.Printf("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("cmd.build.error.gpg_signing_failed"), err) - } - return nil, err - } - } - - if !ciMode { - for _, artifact := range checksummedArtifacts { - relPath, err := filepath.Rel(projectDir, artifact.Path) - if err != nil { - relPath = artifact.Path - } - fmt.Printf(" %s %s\n", - buildSuccessStyle.Render("*"), - buildTargetStyle.Render(relPath), - ) - fmt.Printf(" %s\n", buildDimStyle.Render(artifact.Checksum)) - } - - relChecksumPath, err := filepath.Rel(projectDir, checksumPath) - if err != nil { - relChecksumPath = checksumPath - } - fmt.Printf(" %s %s\n", - buildSuccessStyle.Render("*"), - buildTargetStyle.Render(relChecksumPath), - ) - } - - return checksummedArtifacts, nil -} - -// parseTargets parses a comma-separated list of OS/arch pairs. 
-func parseTargets(targetsFlag string) ([]build.Target, error) { - parts := strings.Split(targetsFlag, ",") - var targets []build.Target - - for _, part := range parts { - part = strings.TrimSpace(part) - if part == "" { - continue - } - - osArch := strings.Split(part, "/") - if len(osArch) != 2 { - return nil, fmt.Errorf("%s", i18n.T("cmd.build.error.invalid_target", map[string]interface{}{"Target": part})) - } - - targets = append(targets, build.Target{ - OS: strings.TrimSpace(osArch[0]), - Arch: strings.TrimSpace(osArch[1]), - }) - } - - if len(targets) == 0 { - return nil, fmt.Errorf("%s", i18n.T("cmd.build.error.no_targets")) - } - - return targets, nil -} - -// formatTargets returns a human-readable string of targets. -func formatTargets(targets []build.Target) string { - var parts []string - for _, t := range targets { - parts = append(parts, t.String()) - } - return strings.Join(parts, ", ") -} - -// getBuilder returns the appropriate builder for the project type. -func getBuilder(projectType build.ProjectType) (build.Builder, error) { - switch projectType { - case build.ProjectTypeWails: - return builders.NewWailsBuilder(), nil - case build.ProjectTypeGo: - return builders.NewGoBuilder(), nil - case build.ProjectTypeDocker: - return builders.NewDockerBuilder(), nil - case build.ProjectTypeLinuxKit: - return builders.NewLinuxKitBuilder(), nil - case build.ProjectTypeTaskfile: - return builders.NewTaskfileBuilder(), nil - case build.ProjectTypeNode: - return nil, fmt.Errorf("%s", i18n.T("cmd.build.error.node_not_implemented")) - case build.ProjectTypePHP: - return nil, fmt.Errorf("%s", i18n.T("cmd.build.error.php_not_implemented")) - default: - return nil, fmt.Errorf("%s: %s", i18n.T("cmd.build.error.unsupported_type"), projectType) - } -} diff --git a/pkg/build/buildcmd/cmd_pwa.go b/pkg/build/buildcmd/cmd_pwa.go deleted file mode 100644 index 09f3f13..0000000 --- a/pkg/build/buildcmd/cmd_pwa.go +++ /dev/null @@ -1,324 +0,0 @@ -// cmd_pwa.go implements PWA 
and legacy GUI build functionality. -// -// Supports building desktop applications from: -// - Local static web application directories -// - Live PWA URLs (downloads and packages) - -package buildcmd - -import ( - "encoding/json" - "errors" - "fmt" - "io" - "net/http" - "net/url" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/i18n" - "github.com/leaanthony/debme" - "github.com/leaanthony/gosod" - "golang.org/x/net/html" -) - -// Error sentinels for build commands -var ( - errPathRequired = errors.New("the --path flag is required") - errURLRequired = errors.New("the --url flag is required") -) - -// runPwaBuild downloads a PWA from URL and builds it. -func runPwaBuild(pwaURL string) error { - fmt.Printf("%s %s\n", i18n.T("cmd.build.pwa.starting"), pwaURL) - - tempDir, err := os.MkdirTemp("", "core-pwa-build-*") - if err != nil { - return fmt.Errorf("%s: %w", i18n.T("common.error.failed", map[string]any{"Action": "create temporary directory"}), err) - } - // defer os.RemoveAll(tempDir) // Keep temp dir for debugging - fmt.Printf("%s %s\n", i18n.T("cmd.build.pwa.downloading_to"), tempDir) - - if err := downloadPWA(pwaURL, tempDir); err != nil { - return fmt.Errorf("%s: %w", i18n.T("common.error.failed", map[string]any{"Action": "download PWA"}), err) - } - - return runBuild(tempDir) -} - -// downloadPWA fetches a PWA from a URL and saves assets locally. 
-func downloadPWA(baseURL, destDir string) error { - // Fetch the main HTML page - resp, err := http.Get(baseURL) - if err != nil { - return fmt.Errorf("%s %s: %w", i18n.T("common.error.failed", map[string]any{"Action": "fetch URL"}), baseURL, err) - } - defer resp.Body.Close() - - body, err := io.ReadAll(resp.Body) - if err != nil { - return fmt.Errorf("%s: %w", i18n.T("common.error.failed", map[string]any{"Action": "read response body"}), err) - } - - // Find the manifest URL from the HTML - manifestURL, err := findManifestURL(string(body), baseURL) - if err != nil { - // If no manifest, it's not a PWA, but we can still try to package it as a simple site. - fmt.Printf("%s %s\n", i18n.T("common.label.warning"), i18n.T("cmd.build.pwa.no_manifest")) - if err := os.WriteFile(filepath.Join(destDir, "index.html"), body, 0644); err != nil { - return fmt.Errorf("%s: %w", i18n.T("common.error.failed", map[string]any{"Action": "write index.html"}), err) - } - return nil - } - - fmt.Printf("%s %s\n", i18n.T("cmd.build.pwa.found_manifest"), manifestURL) - - // Fetch and parse the manifest - manifest, err := fetchManifest(manifestURL) - if err != nil { - return fmt.Errorf("%s: %w", i18n.T("common.error.failed", map[string]any{"Action": "fetch or parse manifest"}), err) - } - - // Download all assets listed in the manifest - assets := collectAssets(manifest, manifestURL) - for _, assetURL := range assets { - if err := downloadAsset(assetURL, destDir); err != nil { - fmt.Printf("%s %s %s: %v\n", i18n.T("common.label.warning"), i18n.T("common.error.failed", map[string]any{"Action": "download asset"}), assetURL, err) - } - } - - // Also save the root index.html - if err := os.WriteFile(filepath.Join(destDir, "index.html"), body, 0644); err != nil { - return fmt.Errorf("%s: %w", i18n.T("common.error.failed", map[string]any{"Action": "write index.html"}), err) - } - - fmt.Println(i18n.T("cmd.build.pwa.download_complete")) - return nil -} - -// findManifestURL extracts the manifest 
URL from HTML content. -func findManifestURL(htmlContent, baseURL string) (string, error) { - doc, err := html.Parse(strings.NewReader(htmlContent)) - if err != nil { - return "", err - } - - var manifestPath string - var f func(*html.Node) - f = func(n *html.Node) { - if n.Type == html.ElementNode && n.Data == "link" { - var rel, href string - for _, a := range n.Attr { - if a.Key == "rel" { - rel = a.Val - } - if a.Key == "href" { - href = a.Val - } - } - if rel == "manifest" && href != "" { - manifestPath = href - return - } - } - for c := n.FirstChild; c != nil; c = c.NextSibling { - f(c) - } - } - f(doc) - - if manifestPath == "" { - return "", fmt.Errorf("%s", i18n.T("cmd.build.pwa.error.no_manifest_tag")) - } - - base, err := url.Parse(baseURL) - if err != nil { - return "", err - } - - manifestURL, err := base.Parse(manifestPath) - if err != nil { - return "", err - } - - return manifestURL.String(), nil -} - -// fetchManifest downloads and parses a PWA manifest. -func fetchManifest(manifestURL string) (map[string]interface{}, error) { - resp, err := http.Get(manifestURL) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - var manifest map[string]interface{} - if err := json.NewDecoder(resp.Body).Decode(&manifest); err != nil { - return nil, err - } - return manifest, nil -} - -// collectAssets extracts asset URLs from a PWA manifest. 
-func collectAssets(manifest map[string]interface{}, manifestURL string) []string { - var assets []string - base, _ := url.Parse(manifestURL) - - // Add start_url - if startURL, ok := manifest["start_url"].(string); ok { - if resolved, err := base.Parse(startURL); err == nil { - assets = append(assets, resolved.String()) - } - } - - // Add icons - if icons, ok := manifest["icons"].([]interface{}); ok { - for _, icon := range icons { - if iconMap, ok := icon.(map[string]interface{}); ok { - if src, ok := iconMap["src"].(string); ok { - if resolved, err := base.Parse(src); err == nil { - assets = append(assets, resolved.String()) - } - } - } - } - } - - return assets -} - -// downloadAsset fetches a single asset and saves it locally. -func downloadAsset(assetURL, destDir string) error { - resp, err := http.Get(assetURL) - if err != nil { - return err - } - defer resp.Body.Close() - - u, err := url.Parse(assetURL) - if err != nil { - return err - } - - path := filepath.Join(destDir, filepath.FromSlash(u.Path)) - if err := os.MkdirAll(filepath.Dir(path), os.ModePerm); err != nil { - return err - } - - out, err := os.Create(path) - if err != nil { - return err - } - defer out.Close() - - _, err = io.Copy(out, resp.Body) - return err -} - -// runBuild builds a desktop application from a local directory. 
-func runBuild(fromPath string) error { - fmt.Printf("%s %s\n", i18n.T("cmd.build.from_path.starting"), fromPath) - - info, err := os.Stat(fromPath) - if err != nil { - return fmt.Errorf("%s: %w", i18n.T("cmd.build.from_path.error.invalid_path"), err) - } - if !info.IsDir() { - return fmt.Errorf("%s", i18n.T("cmd.build.from_path.error.must_be_directory")) - } - - buildDir := ".core/build/app" - htmlDir := filepath.Join(buildDir, "html") - appName := filepath.Base(fromPath) - if strings.HasPrefix(appName, "core-pwa-build-") { - appName = "pwa-app" - } - outputExe := appName - - if err := os.RemoveAll(buildDir); err != nil { - return fmt.Errorf("%s: %w", i18n.T("common.error.failed", map[string]any{"Action": "clean build directory"}), err) - } - - // 1. Generate the project from the embedded template - fmt.Println(i18n.T("cmd.build.from_path.generating_template")) - templateFS, err := debme.FS(guiTemplate, "tmpl/gui") - if err != nil { - return fmt.Errorf("%s: %w", i18n.T("common.error.failed", map[string]any{"Action": "anchor template filesystem"}), err) - } - sod := gosod.New(templateFS) - if sod == nil { - return fmt.Errorf("%s", i18n.T("common.error.failed", map[string]any{"Action": "create new sod instance"})) - } - - templateData := map[string]string{"AppName": appName} - if err := sod.Extract(buildDir, templateData); err != nil { - return fmt.Errorf("%s: %w", i18n.T("common.error.failed", map[string]any{"Action": "extract template"}), err) - } - - // 2. Copy the user's web app files - fmt.Println(i18n.T("cmd.build.from_path.copying_files")) - if err := copyDir(fromPath, htmlDir); err != nil { - return fmt.Errorf("%s: %w", i18n.T("common.error.failed", map[string]any{"Action": "copy application files"}), err) - } - - // 3. 
Compile the application - fmt.Println(i18n.T("cmd.build.from_path.compiling")) - - // Run go mod tidy - cmd := exec.Command("go", "mod", "tidy") - cmd.Dir = buildDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - return fmt.Errorf("%s: %w", i18n.T("cmd.build.from_path.error.go_mod_tidy"), err) - } - - // Run go build - cmd = exec.Command("go", "build", "-o", outputExe) - cmd.Dir = buildDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - return fmt.Errorf("%s: %w", i18n.T("cmd.build.from_path.error.go_build"), err) - } - - fmt.Printf("\n%s %s/%s\n", i18n.T("cmd.build.from_path.success"), buildDir, outputExe) - return nil -} - -// copyDir recursively copies a directory from src to dst. -func copyDir(src, dst string) error { - return filepath.Walk(src, func(path string, info os.FileInfo, err error) error { - if err != nil { - return err - } - - relPath, err := filepath.Rel(src, path) - if err != nil { - return err - } - - dstPath := filepath.Join(dst, relPath) - - if info.IsDir() { - return os.MkdirAll(dstPath, info.Mode()) - } - - srcFile, err := os.Open(path) - if err != nil { - return err - } - defer srcFile.Close() - - dstFile, err := os.Create(dstPath) - if err != nil { - return err - } - defer dstFile.Close() - - _, err = io.Copy(dstFile, srcFile) - return err - }) -} diff --git a/pkg/build/buildcmd/cmd_sdk.go b/pkg/build/buildcmd/cmd_sdk.go deleted file mode 100644 index 8102293..0000000 --- a/pkg/build/buildcmd/cmd_sdk.go +++ /dev/null @@ -1,82 +0,0 @@ -// cmd_sdk.go implements SDK generation from OpenAPI specifications. -// -// Generates typed API clients for TypeScript, Python, Go, and PHP -// from OpenAPI/Swagger specifications. - -package buildcmd - -import ( - "context" - "fmt" - "os" - "strings" - - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/sdk" -) - -// runBuildSDK handles the `core build sdk` command. 
-func runBuildSDK(specPath, lang, version string, dryRun bool) error { - ctx := context.Background() - - projectDir, err := os.Getwd() - if err != nil { - return fmt.Errorf("%s: %w", i18n.T("common.error.failed", map[string]any{"Action": "get working directory"}), err) - } - - // Load config - config := sdk.DefaultConfig() - if specPath != "" { - config.Spec = specPath - } - - s := sdk.New(projectDir, config) - if version != "" { - s.SetVersion(version) - } - - fmt.Printf("%s %s\n", buildHeaderStyle.Render(i18n.T("cmd.build.sdk.label")), i18n.T("cmd.build.sdk.generating")) - if dryRun { - fmt.Printf(" %s\n", buildDimStyle.Render(i18n.T("cmd.build.sdk.dry_run_mode"))) - } - fmt.Println() - - // Detect spec - detectedSpec, err := s.DetectSpec() - if err != nil { - fmt.Printf("%s %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), err) - return err - } - fmt.Printf(" %s %s\n", i18n.T("common.label.spec"), buildTargetStyle.Render(detectedSpec)) - - if dryRun { - if lang != "" { - fmt.Printf(" %s %s\n", i18n.T("cmd.build.sdk.language_label"), buildTargetStyle.Render(lang)) - } else { - fmt.Printf(" %s %s\n", i18n.T("cmd.build.sdk.languages_label"), buildTargetStyle.Render(strings.Join(config.Languages, ", "))) - } - fmt.Println() - fmt.Printf("%s %s\n", buildSuccessStyle.Render(i18n.T("cmd.build.label.ok")), i18n.T("cmd.build.sdk.would_generate")) - return nil - } - - if lang != "" { - // Generate single language - if err := s.GenerateLanguage(ctx, lang); err != nil { - fmt.Printf("%s %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), err) - return err - } - fmt.Printf(" %s %s\n", i18n.T("cmd.build.sdk.generated_label"), buildTargetStyle.Render(lang)) - } else { - // Generate all - if err := s.Generate(ctx); err != nil { - fmt.Printf("%s %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), err) - return err - } - fmt.Printf(" %s %s\n", i18n.T("cmd.build.sdk.generated_label"), buildTargetStyle.Render(strings.Join(config.Languages, ", "))) - } - 
- fmt.Println() - fmt.Printf("%s %s\n", buildSuccessStyle.Render(i18n.T("common.label.success")), i18n.T("cmd.build.sdk.complete")) - return nil -} diff --git a/pkg/build/buildcmd/tmpl/gui/go.mod.tmpl b/pkg/build/buildcmd/tmpl/gui/go.mod.tmpl deleted file mode 100644 index 1a30708..0000000 --- a/pkg/build/buildcmd/tmpl/gui/go.mod.tmpl +++ /dev/null @@ -1,7 +0,0 @@ -module {{.AppName}} - -go 1.21 - -require ( - github.com/wailsapp/wails/v3 v3.0.0-alpha.8 -) diff --git a/pkg/build/buildcmd/tmpl/gui/html/.gitkeep b/pkg/build/buildcmd/tmpl/gui/html/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/pkg/build/buildcmd/tmpl/gui/html/.placeholder b/pkg/build/buildcmd/tmpl/gui/html/.placeholder deleted file mode 100644 index 1044078..0000000 --- a/pkg/build/buildcmd/tmpl/gui/html/.placeholder +++ /dev/null @@ -1 +0,0 @@ -// This file ensures the 'html' directory is correctly embedded by the Go compiler. diff --git a/pkg/build/buildcmd/tmpl/gui/main.go.tmpl b/pkg/build/buildcmd/tmpl/gui/main.go.tmpl deleted file mode 100644 index 2b71fed..0000000 --- a/pkg/build/buildcmd/tmpl/gui/main.go.tmpl +++ /dev/null @@ -1,25 +0,0 @@ -package main - -import ( - "embed" - "log" - - "github.com/wailsapp/wails/v3/pkg/application" -) - -//go:embed all:html -var assets embed.FS - -func main() { - app := application.New(application.Options{ - Name: "{{.AppName}}", - Description: "A web application enclaved by Core.", - Assets: application.AssetOptions{ - FS: assets, - }, - }) - - if err := app.Run(); err != nil { - log.Fatal(err) - } -} diff --git a/pkg/build/builders/docker.go b/pkg/build/builders/docker.go deleted file mode 100644 index f2f53e7..0000000 --- a/pkg/build/builders/docker.go +++ /dev/null @@ -1,214 +0,0 @@ -// Package builders provides build implementations for different project types. 
-package builders - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/build" -) - -// DockerBuilder builds Docker images. -type DockerBuilder struct{} - -// NewDockerBuilder creates a new Docker builder. -func NewDockerBuilder() *DockerBuilder { - return &DockerBuilder{} -} - -// Name returns the builder's identifier. -func (b *DockerBuilder) Name() string { - return "docker" -} - -// Detect checks if a Dockerfile exists in the directory. -func (b *DockerBuilder) Detect(dir string) (bool, error) { - dockerfilePath := filepath.Join(dir, "Dockerfile") - if _, err := os.Stat(dockerfilePath); err == nil { - return true, nil - } - return false, nil -} - -// Build builds Docker images for the specified targets. -func (b *DockerBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) { - // Validate docker CLI is available - if err := b.validateDockerCli(); err != nil { - return nil, err - } - - // Ensure buildx is available - if err := b.ensureBuildx(ctx); err != nil { - return nil, err - } - - // Determine Dockerfile path - dockerfile := cfg.Dockerfile - if dockerfile == "" { - dockerfile = filepath.Join(cfg.ProjectDir, "Dockerfile") - } - - // Validate Dockerfile exists - if _, err := os.Stat(dockerfile); err != nil { - return nil, fmt.Errorf("docker.Build: Dockerfile not found: %s", dockerfile) - } - - // Determine image name - imageName := cfg.Image - if imageName == "" { - imageName = cfg.Name - } - if imageName == "" { - imageName = filepath.Base(cfg.ProjectDir) - } - - // Build platform string from targets - var platforms []string - for _, t := range targets { - platforms = append(platforms, fmt.Sprintf("%s/%s", t.OS, t.Arch)) - } - - // If no targets specified, use current platform - if len(platforms) == 0 { - platforms = []string{"linux/amd64"} - } - - // Determine registry - registry := cfg.Registry - if registry == "" { - registry = "ghcr.io" - } - - 
// Determine tags - tags := cfg.Tags - if len(tags) == 0 { - tags = []string{"latest"} - if cfg.Version != "" { - tags = append(tags, cfg.Version) - } - } - - // Build full image references - var imageRefs []string - for _, tag := range tags { - // Expand version template - expandedTag := strings.ReplaceAll(tag, "{{.Version}}", cfg.Version) - expandedTag = strings.ReplaceAll(expandedTag, "{{Version}}", cfg.Version) - - if registry != "" { - imageRefs = append(imageRefs, fmt.Sprintf("%s/%s:%s", registry, imageName, expandedTag)) - } else { - imageRefs = append(imageRefs, fmt.Sprintf("%s:%s", imageName, expandedTag)) - } - } - - // Build the docker buildx command - args := []string{"buildx", "build"} - - // Multi-platform support - args = append(args, "--platform", strings.Join(platforms, ",")) - - // Add all tags - for _, ref := range imageRefs { - args = append(args, "-t", ref) - } - - // Dockerfile path - args = append(args, "-f", dockerfile) - - // Build arguments - for k, v := range cfg.BuildArgs { - expandedValue := strings.ReplaceAll(v, "{{.Version}}", cfg.Version) - expandedValue = strings.ReplaceAll(expandedValue, "{{Version}}", cfg.Version) - args = append(args, "--build-arg", fmt.Sprintf("%s=%s", k, expandedValue)) - } - - // Always add VERSION build arg if version is set - if cfg.Version != "" { - args = append(args, "--build-arg", fmt.Sprintf("VERSION=%s", cfg.Version)) - } - - // Output to local docker images or push - if cfg.Push { - args = append(args, "--push") - } else { - // For multi-platform builds without push, we need to load or output somewhere - if len(platforms) == 1 { - args = append(args, "--load") - } else { - // Multi-platform builds can't use --load, output to tarball - outputPath := filepath.Join(cfg.OutputDir, fmt.Sprintf("%s.tar", imageName)) - args = append(args, "--output", fmt.Sprintf("type=oci,dest=%s", outputPath)) - } - } - - // Build context (project directory) - args = append(args, cfg.ProjectDir) - - // Create output 
directory - if err := os.MkdirAll(cfg.OutputDir, 0755); err != nil { - return nil, fmt.Errorf("docker.Build: failed to create output directory: %w", err) - } - - // Execute build - cmd := exec.CommandContext(ctx, "docker", args...) - cmd.Dir = cfg.ProjectDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - fmt.Printf("Building Docker image: %s\n", imageName) - fmt.Printf(" Platforms: %s\n", strings.Join(platforms, ", ")) - fmt.Printf(" Tags: %s\n", strings.Join(imageRefs, ", ")) - - if err := cmd.Run(); err != nil { - return nil, fmt.Errorf("docker.Build: buildx build failed: %w", err) - } - - // Create artifacts for each platform - var artifacts []build.Artifact - for _, t := range targets { - artifacts = append(artifacts, build.Artifact{ - Path: imageRefs[0], // Primary image reference - OS: t.OS, - Arch: t.Arch, - }) - } - - return artifacts, nil -} - -// validateDockerCli checks if the docker CLI is available. -func (b *DockerBuilder) validateDockerCli() error { - cmd := exec.Command("docker", "--version") - if err := cmd.Run(); err != nil { - return fmt.Errorf("docker: docker CLI not found. Install it from https://docs.docker.com/get-docker/") - } - return nil -} - -// ensureBuildx ensures docker buildx is available and has a builder. -func (b *DockerBuilder) ensureBuildx(ctx context.Context) error { - // Check if buildx is available - cmd := exec.CommandContext(ctx, "docker", "buildx", "version") - if err := cmd.Run(); err != nil { - return fmt.Errorf("docker: buildx is not available. 
Install it from https://docs.docker.com/buildx/working-with-buildx/") - } - - // Check if we have a builder, create one if not - cmd = exec.CommandContext(ctx, "docker", "buildx", "inspect", "--bootstrap") - if err := cmd.Run(); err != nil { - // Try to create a builder - cmd = exec.CommandContext(ctx, "docker", "buildx", "create", "--use", "--bootstrap") - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - return fmt.Errorf("docker: failed to create buildx builder: %w", err) - } - } - - return nil -} diff --git a/pkg/build/builders/go.go b/pkg/build/builders/go.go deleted file mode 100644 index 63275d9..0000000 --- a/pkg/build/builders/go.go +++ /dev/null @@ -1,128 +0,0 @@ -// Package builders provides build implementations for different project types. -package builders - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/build" -) - -// GoBuilder implements the Builder interface for Go projects. -type GoBuilder struct{} - -// NewGoBuilder creates a new GoBuilder instance. -func NewGoBuilder() *GoBuilder { - return &GoBuilder{} -} - -// Name returns the builder's identifier. -func (b *GoBuilder) Name() string { - return "go" -} - -// Detect checks if this builder can handle the project in the given directory. -// Uses IsGoProject from the build package which checks for go.mod or wails.json. -func (b *GoBuilder) Detect(dir string) (bool, error) { - return build.IsGoProject(dir), nil -} - -// Build compiles the Go project for the specified targets. -// It sets GOOS, GOARCH, and CGO_ENABLED environment variables, -// applies ldflags and trimpath, and runs go build. 
-func (b *GoBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) { - if cfg == nil { - return nil, fmt.Errorf("builders.GoBuilder.Build: config is nil") - } - - if len(targets) == 0 { - return nil, fmt.Errorf("builders.GoBuilder.Build: no targets specified") - } - - // Ensure output directory exists - if err := os.MkdirAll(cfg.OutputDir, 0755); err != nil { - return nil, fmt.Errorf("builders.GoBuilder.Build: failed to create output directory: %w", err) - } - - var artifacts []build.Artifact - - for _, target := range targets { - artifact, err := b.buildTarget(ctx, cfg, target) - if err != nil { - return artifacts, fmt.Errorf("builders.GoBuilder.Build: failed to build %s: %w", target.String(), err) - } - artifacts = append(artifacts, artifact) - } - - return artifacts, nil -} - -// buildTarget compiles for a single target platform. -func (b *GoBuilder) buildTarget(ctx context.Context, cfg *build.Config, target build.Target) (build.Artifact, error) { - // Determine output binary name - binaryName := cfg.Name - if binaryName == "" { - binaryName = filepath.Base(cfg.ProjectDir) - } - - // Add .exe extension for Windows - if target.OS == "windows" && !strings.HasSuffix(binaryName, ".exe") { - binaryName += ".exe" - } - - // Create platform-specific output path: output/os_arch/binary - platformDir := filepath.Join(cfg.OutputDir, fmt.Sprintf("%s_%s", target.OS, target.Arch)) - if err := os.MkdirAll(platformDir, 0755); err != nil { - return build.Artifact{}, fmt.Errorf("failed to create platform directory: %w", err) - } - - outputPath := filepath.Join(platformDir, binaryName) - - // Build the go build arguments - args := []string{"build"} - - // Add trimpath flag - args = append(args, "-trimpath") - - // Add ldflags if specified - if len(cfg.LDFlags) > 0 { - ldflags := strings.Join(cfg.LDFlags, " ") - args = append(args, "-ldflags", ldflags) - } - - // Add output path - args = append(args, "-o", outputPath) - - // Add the 
project directory as the build target (current directory) - args = append(args, ".") - - // Create the command - cmd := exec.CommandContext(ctx, "go", args...) - cmd.Dir = cfg.ProjectDir - - // Set up environment - env := os.Environ() - env = append(env, fmt.Sprintf("GOOS=%s", target.OS)) - env = append(env, fmt.Sprintf("GOARCH=%s", target.Arch)) - env = append(env, "CGO_ENABLED=0") // CGO disabled by default for cross-compilation - cmd.Env = env - - // Capture output for error messages - output, err := cmd.CombinedOutput() - if err != nil { - return build.Artifact{}, fmt.Errorf("go build failed: %w\nOutput: %s", err, string(output)) - } - - return build.Artifact{ - Path: outputPath, - OS: target.OS, - Arch: target.Arch, - }, nil -} - -// Ensure GoBuilder implements the Builder interface. -var _ build.Builder = (*GoBuilder)(nil) diff --git a/pkg/build/builders/go_test.go b/pkg/build/builders/go_test.go deleted file mode 100644 index c46ad3b..0000000 --- a/pkg/build/builders/go_test.go +++ /dev/null @@ -1,385 +0,0 @@ -package builders - -import ( - "context" - "os" - "path/filepath" - "runtime" - "testing" - - "github.com/host-uk/core/pkg/build" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// setupGoTestProject creates a minimal Go project for testing. 
-func setupGoTestProject(t *testing.T) string { - t.Helper() - dir := t.TempDir() - - // Create a minimal go.mod - goMod := `module testproject - -go 1.21 -` - err := os.WriteFile(filepath.Join(dir, "go.mod"), []byte(goMod), 0644) - require.NoError(t, err) - - // Create a minimal main.go - mainGo := `package main - -func main() { - println("hello") -} -` - err = os.WriteFile(filepath.Join(dir, "main.go"), []byte(mainGo), 0644) - require.NoError(t, err) - - return dir -} - -func TestGoBuilder_Name_Good(t *testing.T) { - builder := NewGoBuilder() - assert.Equal(t, "go", builder.Name()) -} - -func TestGoBuilder_Detect_Good(t *testing.T) { - t.Run("detects Go project with go.mod", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module test"), 0644) - require.NoError(t, err) - - builder := NewGoBuilder() - detected, err := builder.Detect(dir) - assert.NoError(t, err) - assert.True(t, detected) - }) - - t.Run("detects Wails project", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "wails.json"), []byte("{}"), 0644) - require.NoError(t, err) - - builder := NewGoBuilder() - detected, err := builder.Detect(dir) - assert.NoError(t, err) - assert.True(t, detected) - }) - - t.Run("returns false for non-Go project", func(t *testing.T) { - dir := t.TempDir() - // Create a Node.js project instead - err := os.WriteFile(filepath.Join(dir, "package.json"), []byte("{}"), 0644) - require.NoError(t, err) - - builder := NewGoBuilder() - detected, err := builder.Detect(dir) - assert.NoError(t, err) - assert.False(t, detected) - }) - - t.Run("returns false for empty directory", func(t *testing.T) { - dir := t.TempDir() - - builder := NewGoBuilder() - detected, err := builder.Detect(dir) - assert.NoError(t, err) - assert.False(t, detected) - }) -} - -func TestGoBuilder_Build_Good(t *testing.T) { - if testing.Short() { - t.Skip("skipping integration test in short mode") - } - - t.Run("builds for current 
platform", func(t *testing.T) { - projectDir := setupGoTestProject(t) - outputDir := t.TempDir() - - builder := NewGoBuilder() - cfg := &build.Config{ - ProjectDir: projectDir, - OutputDir: outputDir, - Name: "testbinary", - } - targets := []build.Target{ - {OS: runtime.GOOS, Arch: runtime.GOARCH}, - } - - artifacts, err := builder.Build(context.Background(), cfg, targets) - require.NoError(t, err) - require.Len(t, artifacts, 1) - - // Verify artifact properties - artifact := artifacts[0] - assert.Equal(t, runtime.GOOS, artifact.OS) - assert.Equal(t, runtime.GOARCH, artifact.Arch) - - // Verify binary was created - assert.FileExists(t, artifact.Path) - - // Verify the path is in the expected location - expectedName := "testbinary" - if runtime.GOOS == "windows" { - expectedName += ".exe" - } - assert.Contains(t, artifact.Path, expectedName) - }) - - t.Run("builds multiple targets", func(t *testing.T) { - projectDir := setupGoTestProject(t) - outputDir := t.TempDir() - - builder := NewGoBuilder() - cfg := &build.Config{ - ProjectDir: projectDir, - OutputDir: outputDir, - Name: "multitest", - } - targets := []build.Target{ - {OS: "linux", Arch: "amd64"}, - {OS: "linux", Arch: "arm64"}, - } - - artifacts, err := builder.Build(context.Background(), cfg, targets) - require.NoError(t, err) - require.Len(t, artifacts, 2) - - // Verify both artifacts were created - for i, artifact := range artifacts { - assert.Equal(t, targets[i].OS, artifact.OS) - assert.Equal(t, targets[i].Arch, artifact.Arch) - assert.FileExists(t, artifact.Path) - } - }) - - t.Run("adds .exe extension for Windows", func(t *testing.T) { - projectDir := setupGoTestProject(t) - outputDir := t.TempDir() - - builder := NewGoBuilder() - cfg := &build.Config{ - ProjectDir: projectDir, - OutputDir: outputDir, - Name: "wintest", - } - targets := []build.Target{ - {OS: "windows", Arch: "amd64"}, - } - - artifacts, err := builder.Build(context.Background(), cfg, targets) - require.NoError(t, err) - require.Len(t, 
artifacts, 1) - - // Verify .exe extension - assert.True(t, filepath.Ext(artifacts[0].Path) == ".exe") - assert.FileExists(t, artifacts[0].Path) - }) - - t.Run("uses directory name when Name not specified", func(t *testing.T) { - projectDir := setupGoTestProject(t) - outputDir := t.TempDir() - - builder := NewGoBuilder() - cfg := &build.Config{ - ProjectDir: projectDir, - OutputDir: outputDir, - Name: "", // Empty name - } - targets := []build.Target{ - {OS: runtime.GOOS, Arch: runtime.GOARCH}, - } - - artifacts, err := builder.Build(context.Background(), cfg, targets) - require.NoError(t, err) - require.Len(t, artifacts, 1) - - // Binary should use the project directory base name - baseName := filepath.Base(projectDir) - if runtime.GOOS == "windows" { - baseName += ".exe" - } - assert.Contains(t, artifacts[0].Path, baseName) - }) - - t.Run("applies ldflags", func(t *testing.T) { - projectDir := setupGoTestProject(t) - outputDir := t.TempDir() - - builder := NewGoBuilder() - cfg := &build.Config{ - ProjectDir: projectDir, - OutputDir: outputDir, - Name: "ldflagstest", - LDFlags: []string{"-s", "-w"}, // Strip debug info - } - targets := []build.Target{ - {OS: runtime.GOOS, Arch: runtime.GOARCH}, - } - - artifacts, err := builder.Build(context.Background(), cfg, targets) - require.NoError(t, err) - require.Len(t, artifacts, 1) - assert.FileExists(t, artifacts[0].Path) - }) - - t.Run("creates output directory if missing", func(t *testing.T) { - projectDir := setupGoTestProject(t) - outputDir := filepath.Join(t.TempDir(), "nested", "output") - - builder := NewGoBuilder() - cfg := &build.Config{ - ProjectDir: projectDir, - OutputDir: outputDir, - Name: "nestedtest", - } - targets := []build.Target{ - {OS: runtime.GOOS, Arch: runtime.GOARCH}, - } - - artifacts, err := builder.Build(context.Background(), cfg, targets) - require.NoError(t, err) - require.Len(t, artifacts, 1) - assert.FileExists(t, artifacts[0].Path) - assert.DirExists(t, outputDir) - }) -} - -func 
TestGoBuilder_Build_Bad(t *testing.T) { - t.Run("returns error for nil config", func(t *testing.T) { - builder := NewGoBuilder() - - artifacts, err := builder.Build(context.Background(), nil, []build.Target{{OS: "linux", Arch: "amd64"}}) - assert.Error(t, err) - assert.Nil(t, artifacts) - assert.Contains(t, err.Error(), "config is nil") - }) - - t.Run("returns error for empty targets", func(t *testing.T) { - projectDir := setupGoTestProject(t) - - builder := NewGoBuilder() - cfg := &build.Config{ - ProjectDir: projectDir, - OutputDir: t.TempDir(), - Name: "test", - } - - artifacts, err := builder.Build(context.Background(), cfg, []build.Target{}) - assert.Error(t, err) - assert.Nil(t, artifacts) - assert.Contains(t, err.Error(), "no targets specified") - }) - - t.Run("returns error for invalid project directory", func(t *testing.T) { - if testing.Short() { - t.Skip("skipping integration test in short mode") - } - - builder := NewGoBuilder() - cfg := &build.Config{ - ProjectDir: "/nonexistent/path", - OutputDir: t.TempDir(), - Name: "test", - } - targets := []build.Target{ - {OS: runtime.GOOS, Arch: runtime.GOARCH}, - } - - artifacts, err := builder.Build(context.Background(), cfg, targets) - assert.Error(t, err) - assert.Empty(t, artifacts) - }) - - t.Run("returns error for invalid Go code", func(t *testing.T) { - if testing.Short() { - t.Skip("skipping integration test in short mode") - } - - dir := t.TempDir() - - // Create go.mod - err := os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module test\n\ngo 1.21"), 0644) - require.NoError(t, err) - - // Create invalid Go code - err = os.WriteFile(filepath.Join(dir, "main.go"), []byte("this is not valid go code"), 0644) - require.NoError(t, err) - - builder := NewGoBuilder() - cfg := &build.Config{ - ProjectDir: dir, - OutputDir: t.TempDir(), - Name: "test", - } - targets := []build.Target{ - {OS: runtime.GOOS, Arch: runtime.GOARCH}, - } - - artifacts, err := builder.Build(context.Background(), cfg, targets) - 
assert.Error(t, err) - assert.Contains(t, err.Error(), "go build failed") - assert.Empty(t, artifacts) - }) - - t.Run("returns partial artifacts on partial failure", func(t *testing.T) { - if testing.Short() { - t.Skip("skipping integration test in short mode") - } - - // Create a project that will fail on one target - // Using an invalid arch for linux - projectDir := setupGoTestProject(t) - outputDir := t.TempDir() - - builder := NewGoBuilder() - cfg := &build.Config{ - ProjectDir: projectDir, - OutputDir: outputDir, - Name: "partialtest", - } - targets := []build.Target{ - {OS: runtime.GOOS, Arch: runtime.GOARCH}, // This should succeed - {OS: "linux", Arch: "invalid_arch"}, // This should fail - } - - artifacts, err := builder.Build(context.Background(), cfg, targets) - // Should return error for the failed build - assert.Error(t, err) - // Should have the successful artifact - assert.Len(t, artifacts, 1) - }) - - t.Run("respects context cancellation", func(t *testing.T) { - if testing.Short() { - t.Skip("skipping integration test in short mode") - } - - projectDir := setupGoTestProject(t) - - builder := NewGoBuilder() - cfg := &build.Config{ - ProjectDir: projectDir, - OutputDir: t.TempDir(), - Name: "canceltest", - } - targets := []build.Target{ - {OS: runtime.GOOS, Arch: runtime.GOARCH}, - } - - // Create an already cancelled context - ctx, cancel := context.WithCancel(context.Background()) - cancel() - - artifacts, err := builder.Build(ctx, cfg, targets) - assert.Error(t, err) - assert.Empty(t, artifacts) - }) -} - -func TestGoBuilder_Interface_Good(t *testing.T) { - // Verify GoBuilder implements Builder interface - var _ build.Builder = (*GoBuilder)(nil) - var _ build.Builder = NewGoBuilder() -} diff --git a/pkg/build/builders/linuxkit.go b/pkg/build/builders/linuxkit.go deleted file mode 100644 index 5d2e913..0000000 --- a/pkg/build/builders/linuxkit.go +++ /dev/null @@ -1,248 +0,0 @@ -// Package builders provides build implementations for different 
project types. -package builders - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/build" -) - -// LinuxKitBuilder builds LinuxKit images. -type LinuxKitBuilder struct{} - -// NewLinuxKitBuilder creates a new LinuxKit builder. -func NewLinuxKitBuilder() *LinuxKitBuilder { - return &LinuxKitBuilder{} -} - -// Name returns the builder's identifier. -func (b *LinuxKitBuilder) Name() string { - return "linuxkit" -} - -// Detect checks if a linuxkit.yml or .yml config exists in the directory. -func (b *LinuxKitBuilder) Detect(dir string) (bool, error) { - // Check for linuxkit.yml - if _, err := os.Stat(filepath.Join(dir, "linuxkit.yml")); err == nil { - return true, nil - } - // Check for .core/linuxkit/*.yml - if matches, _ := filepath.Glob(filepath.Join(dir, ".core", "linuxkit", "*.yml")); len(matches) > 0 { - return true, nil - } - return false, nil -} - -// Build builds LinuxKit images for the specified targets. -func (b *LinuxKitBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) { - // Validate linuxkit CLI is available - if err := b.validateLinuxKitCli(); err != nil { - return nil, err - } - - // Determine config file path - configPath := cfg.LinuxKitConfig - if configPath == "" { - // Auto-detect - if _, err := os.Stat(filepath.Join(cfg.ProjectDir, "linuxkit.yml")); err == nil { - configPath = filepath.Join(cfg.ProjectDir, "linuxkit.yml") - } else { - // Look in .core/linuxkit/ - matches, _ := filepath.Glob(filepath.Join(cfg.ProjectDir, ".core", "linuxkit", "*.yml")) - if len(matches) > 0 { - configPath = matches[0] - } - } - } - - if configPath == "" { - return nil, fmt.Errorf("linuxkit.Build: no LinuxKit config file found. 
Specify with --config or create linuxkit.yml") - } - - // Validate config file exists - if _, err := os.Stat(configPath); err != nil { - return nil, fmt.Errorf("linuxkit.Build: config file not found: %s", configPath) - } - - // Determine output formats - formats := cfg.Formats - if len(formats) == 0 { - formats = []string{"qcow2-bios"} // Default to QEMU-compatible format - } - - // Create output directory - outputDir := cfg.OutputDir - if outputDir == "" { - outputDir = filepath.Join(cfg.ProjectDir, "dist") - } - if err := os.MkdirAll(outputDir, 0755); err != nil { - return nil, fmt.Errorf("linuxkit.Build: failed to create output directory: %w", err) - } - - // Determine base name from config file or project name - baseName := cfg.Name - if baseName == "" { - baseName = strings.TrimSuffix(filepath.Base(configPath), ".yml") - } - - // If no targets, default to linux/amd64 - if len(targets) == 0 { - targets = []build.Target{{OS: "linux", Arch: "amd64"}} - } - - var artifacts []build.Artifact - - // Build for each target and format - for _, target := range targets { - // LinuxKit only supports Linux - if target.OS != "linux" { - fmt.Printf("Skipping %s/%s (LinuxKit only supports Linux)\n", target.OS, target.Arch) - continue - } - - for _, format := range formats { - outputName := fmt.Sprintf("%s-%s", baseName, target.Arch) - - args := b.buildLinuxKitArgs(configPath, format, outputName, outputDir, target.Arch) - - cmd := exec.CommandContext(ctx, "linuxkit", args...) 
- cmd.Dir = cfg.ProjectDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - fmt.Printf("Building LinuxKit image: %s (%s, %s)\n", outputName, format, target.Arch) - - if err := cmd.Run(); err != nil { - return nil, fmt.Errorf("linuxkit.Build: build failed for %s/%s: %w", target.Arch, format, err) - } - - // Determine the actual output file path - artifactPath := b.getArtifactPath(outputDir, outputName, format) - - // Verify the artifact was created - if _, err := os.Stat(artifactPath); err != nil { - // Try alternate naming conventions - artifactPath = b.findArtifact(outputDir, outputName, format) - if artifactPath == "" { - return nil, fmt.Errorf("linuxkit.Build: artifact not found after build: expected %s", b.getArtifactPath(outputDir, outputName, format)) - } - } - - artifacts = append(artifacts, build.Artifact{ - Path: artifactPath, - OS: target.OS, - Arch: target.Arch, - }) - } - } - - return artifacts, nil -} - -// buildLinuxKitArgs builds the arguments for linuxkit build command. -func (b *LinuxKitBuilder) buildLinuxKitArgs(configPath, format, outputName, outputDir, arch string) []string { - args := []string{"build"} - - // Output format - args = append(args, "--format", format) - - // Output name - args = append(args, "--name", outputName) - - // Output directory - args = append(args, "--dir", outputDir) - - // Architecture (if not amd64) - if arch != "amd64" { - args = append(args, "--arch", arch) - } - - // Config file - args = append(args, configPath) - - return args -} - -// getArtifactPath returns the expected path of the built artifact. -func (b *LinuxKitBuilder) getArtifactPath(outputDir, outputName, format string) string { - ext := b.getFormatExtension(format) - return filepath.Join(outputDir, outputName+ext) -} - -// findArtifact searches for the built artifact with various naming conventions. 
-func (b *LinuxKitBuilder) findArtifact(outputDir, outputName, format string) string { - // LinuxKit can create files with different suffixes - extensions := []string{ - b.getFormatExtension(format), - "-bios" + b.getFormatExtension(format), - "-efi" + b.getFormatExtension(format), - } - - for _, ext := range extensions { - path := filepath.Join(outputDir, outputName+ext) - if _, err := os.Stat(path); err == nil { - return path - } - } - - // Try to find any file matching the output name - matches, _ := filepath.Glob(filepath.Join(outputDir, outputName+"*")) - for _, match := range matches { - // Return first match that looks like an image - ext := filepath.Ext(match) - if ext == ".iso" || ext == ".qcow2" || ext == ".raw" || ext == ".vmdk" || ext == ".vhd" { - return match - } - } - - return "" -} - -// getFormatExtension returns the file extension for a LinuxKit output format. -func (b *LinuxKitBuilder) getFormatExtension(format string) string { - switch format { - case "iso", "iso-bios", "iso-efi": - return ".iso" - case "raw", "raw-bios", "raw-efi": - return ".raw" - case "qcow2", "qcow2-bios", "qcow2-efi": - return ".qcow2" - case "vmdk": - return ".vmdk" - case "vhd": - return ".vhd" - case "gcp": - return ".img.tar.gz" - case "aws": - return ".raw" - default: - return "." + strings.TrimSuffix(format, "-bios") - } -} - -// validateLinuxKitCli checks if the linuxkit CLI is available. -func (b *LinuxKitBuilder) validateLinuxKitCli() error { - // Check PATH first - if _, err := exec.LookPath("linuxkit"); err == nil { - return nil - } - - // Check common locations - paths := []string{ - "/usr/local/bin/linuxkit", - "/opt/homebrew/bin/linuxkit", - } - - for _, p := range paths { - if _, err := os.Stat(p); err == nil { - return nil - } - } - - return fmt.Errorf("linuxkit: linuxkit CLI not found. 
Install with: brew install linuxkit (macOS) or see https://github.com/linuxkit/linuxkit") -} diff --git a/pkg/build/builders/taskfile.go b/pkg/build/builders/taskfile.go deleted file mode 100644 index 41888ab..0000000 --- a/pkg/build/builders/taskfile.go +++ /dev/null @@ -1,264 +0,0 @@ -// Package builders provides build implementations for different project types. -package builders - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/build" -) - -// TaskfileBuilder builds projects using Taskfile (https://taskfile.dev/). -// This is a generic builder that can handle any project type that has a Taskfile. -type TaskfileBuilder struct{} - -// NewTaskfileBuilder creates a new Taskfile builder. -func NewTaskfileBuilder() *TaskfileBuilder { - return &TaskfileBuilder{} -} - -// Name returns the builder's identifier. -func (b *TaskfileBuilder) Name() string { - return "taskfile" -} - -// Detect checks if a Taskfile exists in the directory. -func (b *TaskfileBuilder) Detect(dir string) (bool, error) { - // Check for Taskfile.yml, Taskfile.yaml, or Taskfile - taskfiles := []string{ - "Taskfile.yml", - "Taskfile.yaml", - "Taskfile", - "taskfile.yml", - "taskfile.yaml", - } - - for _, tf := range taskfiles { - if _, err := os.Stat(filepath.Join(dir, tf)); err == nil { - return true, nil - } - } - return false, nil -} - -// Build runs the Taskfile build task for each target platform. 
-func (b *TaskfileBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) { - // Validate task CLI is available - if err := b.validateTaskCli(); err != nil { - return nil, err - } - - // Create output directory - outputDir := cfg.OutputDir - if outputDir == "" { - outputDir = filepath.Join(cfg.ProjectDir, "dist") - } - if err := os.MkdirAll(outputDir, 0755); err != nil { - return nil, fmt.Errorf("taskfile.Build: failed to create output directory: %w", err) - } - - var artifacts []build.Artifact - - // If no targets specified, just run the build task once - if len(targets) == 0 { - if err := b.runTask(ctx, cfg, "", ""); err != nil { - return nil, err - } - - // Try to find artifacts in output directory - found := b.findArtifacts(outputDir) - artifacts = append(artifacts, found...) - } else { - // Run build task for each target - for _, target := range targets { - if err := b.runTask(ctx, cfg, target.OS, target.Arch); err != nil { - return nil, err - } - - // Try to find artifacts for this target - found := b.findArtifactsForTarget(outputDir, target) - artifacts = append(artifacts, found...) - } - } - - return artifacts, nil -} - -// runTask executes the Taskfile build task. -func (b *TaskfileBuilder) runTask(ctx context.Context, cfg *build.Config, goos, goarch string) error { - // Build task command - args := []string{"build"} - - // Pass variables if targets are specified - if goos != "" { - args = append(args, fmt.Sprintf("GOOS=%s", goos)) - } - if goarch != "" { - args = append(args, fmt.Sprintf("GOARCH=%s", goarch)) - } - if cfg.OutputDir != "" { - args = append(args, fmt.Sprintf("OUTPUT_DIR=%s", cfg.OutputDir)) - } - if cfg.Name != "" { - args = append(args, fmt.Sprintf("NAME=%s", cfg.Name)) - } - if cfg.Version != "" { - args = append(args, fmt.Sprintf("VERSION=%s", cfg.Version)) - } - - cmd := exec.CommandContext(ctx, "task", args...) 
- cmd.Dir = cfg.ProjectDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - // Set environment variables - cmd.Env = os.Environ() - if goos != "" { - cmd.Env = append(cmd.Env, fmt.Sprintf("GOOS=%s", goos)) - } - if goarch != "" { - cmd.Env = append(cmd.Env, fmt.Sprintf("GOARCH=%s", goarch)) - } - if cfg.OutputDir != "" { - cmd.Env = append(cmd.Env, fmt.Sprintf("OUTPUT_DIR=%s", cfg.OutputDir)) - } - if cfg.Name != "" { - cmd.Env = append(cmd.Env, fmt.Sprintf("NAME=%s", cfg.Name)) - } - if cfg.Version != "" { - cmd.Env = append(cmd.Env, fmt.Sprintf("VERSION=%s", cfg.Version)) - } - - if goos != "" && goarch != "" { - fmt.Printf("Running task build for %s/%s\n", goos, goarch) - } else { - fmt.Println("Running task build") - } - - if err := cmd.Run(); err != nil { - return fmt.Errorf("taskfile.Build: task build failed: %w", err) - } - - return nil -} - -// findArtifacts searches for built artifacts in the output directory. -func (b *TaskfileBuilder) findArtifacts(outputDir string) []build.Artifact { - var artifacts []build.Artifact - - entries, err := os.ReadDir(outputDir) - if err != nil { - return artifacts - } - - for _, entry := range entries { - if entry.IsDir() { - continue - } - - // Skip common non-artifact files - name := entry.Name() - if strings.HasPrefix(name, ".") || name == "CHECKSUMS.txt" { - continue - } - - artifacts = append(artifacts, build.Artifact{ - Path: filepath.Join(outputDir, name), - OS: "", - Arch: "", - }) - } - - return artifacts -} - -// findArtifactsForTarget searches for built artifacts for a specific target. -func (b *TaskfileBuilder) findArtifactsForTarget(outputDir string, target build.Target) []build.Artifact { - var artifacts []build.Artifact - - // 1. 
Look for platform-specific subdirectory: output/os_arch/ - platformSubdir := filepath.Join(outputDir, fmt.Sprintf("%s_%s", target.OS, target.Arch)) - if info, err := os.Stat(platformSubdir); err == nil && info.IsDir() { - entries, _ := os.ReadDir(platformSubdir) - for _, entry := range entries { - if entry.IsDir() { - // Handle .app bundles on macOS - if target.OS == "darwin" && strings.HasSuffix(entry.Name(), ".app") { - artifacts = append(artifacts, build.Artifact{ - Path: filepath.Join(platformSubdir, entry.Name()), - OS: target.OS, - Arch: target.Arch, - }) - } - continue - } - // Skip hidden files - if strings.HasPrefix(entry.Name(), ".") { - continue - } - artifacts = append(artifacts, build.Artifact{ - Path: filepath.Join(platformSubdir, entry.Name()), - OS: target.OS, - Arch: target.Arch, - }) - } - if len(artifacts) > 0 { - return artifacts - } - } - - // 2. Look for files matching the target pattern in the root output dir - patterns := []string{ - fmt.Sprintf("*-%s-%s*", target.OS, target.Arch), - fmt.Sprintf("*_%s_%s*", target.OS, target.Arch), - fmt.Sprintf("*-%s*", target.Arch), - } - - for _, pattern := range patterns { - matches, _ := filepath.Glob(filepath.Join(outputDir, pattern)) - for _, match := range matches { - info, err := os.Stat(match) - if err != nil || info.IsDir() { - continue - } - - artifacts = append(artifacts, build.Artifact{ - Path: match, - OS: target.OS, - Arch: target.Arch, - }) - } - - if len(artifacts) > 0 { - break // Found matches, stop looking - } - } - - return artifacts -} - -// validateTaskCli checks if the task CLI is available. -func (b *TaskfileBuilder) validateTaskCli() error { - // Check PATH first - if _, err := exec.LookPath("task"); err == nil { - return nil - } - - // Check common locations - paths := []string{ - "/usr/local/bin/task", - "/opt/homebrew/bin/task", - } - - for _, p := range paths { - if _, err := os.Stat(p); err == nil { - return nil - } - } - - return fmt.Errorf("taskfile: task CLI not found. 
Install with: brew install go-task (macOS), go install github.com/go-task/task/v3/cmd/task@latest, or see https://taskfile.dev/installation/") -} diff --git a/pkg/build/builders/wails.go b/pkg/build/builders/wails.go deleted file mode 100644 index 66601a0..0000000 --- a/pkg/build/builders/wails.go +++ /dev/null @@ -1,271 +0,0 @@ -// Package builders provides build implementations for different project types. -package builders - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/build" -) - -// WailsBuilder implements the Builder interface for Wails v3 projects. -type WailsBuilder struct{} - -// NewWailsBuilder creates a new WailsBuilder instance. -func NewWailsBuilder() *WailsBuilder { - return &WailsBuilder{} -} - -// Name returns the builder's identifier. -func (b *WailsBuilder) Name() string { - return "wails" -} - -// Detect checks if this builder can handle the project in the given directory. -// Uses IsWailsProject from the build package which checks for wails.json. -func (b *WailsBuilder) Detect(dir string) (bool, error) { - return build.IsWailsProject(dir), nil -} - -// Build compiles the Wails project for the specified targets. 
-// It detects the Wails version and chooses the appropriate build strategy: -// - Wails v3: Delegates to Taskfile (error if missing) -// - Wails v2: Uses 'wails build' command -func (b *WailsBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) { - if cfg == nil { - return nil, fmt.Errorf("builders.WailsBuilder.Build: config is nil") - } - - if len(targets) == 0 { - return nil, fmt.Errorf("builders.WailsBuilder.Build: no targets specified") - } - - // Detect Wails version - isV3 := b.isWailsV3(cfg.ProjectDir) - - if isV3 { - // Wails v3 strategy: Delegate to Taskfile - taskBuilder := NewTaskfileBuilder() - if detected, _ := taskBuilder.Detect(cfg.ProjectDir); detected { - return taskBuilder.Build(ctx, cfg, targets) - } - return nil, fmt.Errorf("Wails v3 projects require a Taskfile for building") - } - - // Wails v2 strategy: Use 'wails build' - // Ensure output directory exists - if err := os.MkdirAll(cfg.OutputDir, 0755); err != nil { - return nil, fmt.Errorf("builders.WailsBuilder.Build: failed to create output directory: %w", err) - } - - // Note: Wails v2 handles frontend installation/building automatically via wails.json config - - var artifacts []build.Artifact - - for _, target := range targets { - artifact, err := b.buildV2Target(ctx, cfg, target) - if err != nil { - return artifacts, fmt.Errorf("builders.WailsBuilder.Build: failed to build %s: %w", target.String(), err) - } - artifacts = append(artifacts, artifact) - } - - return artifacts, nil -} - -// isWailsV3 checks if the project uses Wails v3 by inspecting go.mod. -func (b *WailsBuilder) isWailsV3(dir string) bool { - goModPath := filepath.Join(dir, "go.mod") - data, err := os.ReadFile(goModPath) - if err != nil { - return false - } - return strings.Contains(string(data), "github.com/wailsapp/wails/v3") -} - -// buildV2Target compiles for a single target platform using wails (v2). 
-func (b *WailsBuilder) buildV2Target(ctx context.Context, cfg *build.Config, target build.Target) (build.Artifact, error) { - // Determine output binary name - binaryName := cfg.Name - if binaryName == "" { - binaryName = filepath.Base(cfg.ProjectDir) - } - - // Build the wails build arguments - args := []string{"build"} - - // Platform - args = append(args, "-platform", fmt.Sprintf("%s/%s", target.OS, target.Arch)) - - // Output (Wails v2 uses -o for the binary name, relative to build/bin usually, but we want to control it) - // Actually, Wails v2 is opinionated about output dir (build/bin). - // We might need to copy artifacts after build if we want them in cfg.OutputDir. - // For now, let's try to let Wails do its thing and find the artifact. - - // Create the command - cmd := exec.CommandContext(ctx, "wails", args...) - cmd.Dir = cfg.ProjectDir - - // Capture output for error messages - output, err := cmd.CombinedOutput() - if err != nil { - return build.Artifact{}, fmt.Errorf("wails build failed: %w\nOutput: %s", err, string(output)) - } - - // Wails v2 typically outputs to build/bin - // We need to move/copy it to our desired output dir - - // Construct the source path where Wails v2 puts the binary - wailsOutputDir := filepath.Join(cfg.ProjectDir, "build", "bin") - - // Find the artifact in Wails output dir - sourcePath, err := b.findArtifact(wailsOutputDir, binaryName, target) - if err != nil { - return build.Artifact{}, fmt.Errorf("failed to find Wails v2 build artifact: %w", err) - } - - // Move/Copy to our output dir - // Create platform specific dir in our output - platformDir := filepath.Join(cfg.OutputDir, fmt.Sprintf("%s_%s", target.OS, target.Arch)) - if err := os.MkdirAll(platformDir, 0755); err != nil { - return build.Artifact{}, fmt.Errorf("failed to create output dir: %w", err) - } - - destPath := filepath.Join(platformDir, filepath.Base(sourcePath)) - - // Simple copy - input, err := os.ReadFile(sourcePath) - if err != nil { - return 
build.Artifact{}, err - } - if err := os.WriteFile(destPath, input, 0755); err != nil { - return build.Artifact{}, err - } - - return build.Artifact{ - Path: destPath, - OS: target.OS, - Arch: target.Arch, - }, nil -} - -// findArtifact locates the built artifact based on the target platform. -func (b *WailsBuilder) findArtifact(platformDir, binaryName string, target build.Target) (string, error) { - var candidates []string - - switch target.OS { - case "windows": - // Look for NSIS installer first, then plain exe - candidates = []string{ - filepath.Join(platformDir, binaryName+"-installer.exe"), - filepath.Join(platformDir, binaryName+".exe"), - filepath.Join(platformDir, binaryName+"-amd64-installer.exe"), - } - case "darwin": - // Look for .dmg, then .app bundle, then plain binary - candidates = []string{ - filepath.Join(platformDir, binaryName+".dmg"), - filepath.Join(platformDir, binaryName+".app"), - filepath.Join(platformDir, binaryName), - } - default: - // Linux and others: look for plain binary - candidates = []string{ - filepath.Join(platformDir, binaryName), - } - } - - // Try each candidate - for _, candidate := range candidates { - if fileOrDirExists(candidate) { - return candidate, nil - } - } - - // If no specific candidate found, try to find any executable or package in the directory - entries, err := os.ReadDir(platformDir) - if err != nil { - return "", fmt.Errorf("failed to read platform directory: %w", err) - } - - for _, entry := range entries { - name := entry.Name() - // Skip common non-artifact files - if strings.HasSuffix(name, ".go") || strings.HasSuffix(name, ".json") { - continue - } - - path := filepath.Join(platformDir, name) - info, err := entry.Info() - if err != nil { - continue - } - - // On Unix, check if it's executable; on Windows, check for .exe - if target.OS == "windows" { - if strings.HasSuffix(name, ".exe") { - return path, nil - } - } else if info.Mode()&0111 != 0 || entry.IsDir() { - // Executable file or directory (.app 
bundle) - return path, nil - } - } - - return "", fmt.Errorf("no artifact found in %s", platformDir) -} - -// detectPackageManager detects the frontend package manager based on lock files. -// Returns "bun", "pnpm", "yarn", or "npm" (default). -func detectPackageManager(dir string) string { - // Check in priority order: bun, pnpm, yarn, npm - lockFiles := []struct { - file string - manager string - }{ - {"bun.lockb", "bun"}, - {"pnpm-lock.yaml", "pnpm"}, - {"yarn.lock", "yarn"}, - {"package-lock.json", "npm"}, - } - - for _, lf := range lockFiles { - if fileExists(filepath.Join(dir, lf.file)) { - return lf.manager - } - } - - // Default to npm if no lock file found - return "npm" -} - -// fileExists checks if a file exists and is not a directory. -func fileExists(path string) bool { - info, err := os.Stat(path) - if err != nil { - return false - } - return !info.IsDir() -} - -// dirExists checks if a directory exists. -func dirExists(path string) bool { - info, err := os.Stat(path) - if err != nil { - return false - } - return info.IsDir() -} - -// fileOrDirExists checks if a file or directory exists. -func fileOrDirExists(path string) bool { - _, err := os.Stat(path) - return err == nil -} - -// Ensure WailsBuilder implements the Builder interface. -var _ build.Builder = (*WailsBuilder)(nil) diff --git a/pkg/build/builders/wails_test.go b/pkg/build/builders/wails_test.go deleted file mode 100644 index db643be..0000000 --- a/pkg/build/builders/wails_test.go +++ /dev/null @@ -1,438 +0,0 @@ -package builders - -import ( - "context" - "os" - "os/exec" - "path/filepath" - "runtime" - "testing" - - "github.com/host-uk/core/pkg/build" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// setupWailsTestProject creates a minimal Wails project structure for testing. 
-func setupWailsTestProject(t *testing.T) string { - t.Helper() - dir := t.TempDir() - - // Create wails.json - wailsJSON := `{ - "name": "testapp", - "outputfilename": "testapp" -}` - err := os.WriteFile(filepath.Join(dir, "wails.json"), []byte(wailsJSON), 0644) - require.NoError(t, err) - - // Create a minimal go.mod - goMod := `module testapp - -go 1.21 - -require github.com/wailsapp/wails/v3 v3.0.0 -` - err = os.WriteFile(filepath.Join(dir, "go.mod"), []byte(goMod), 0644) - require.NoError(t, err) - - // Create a minimal main.go - mainGo := `package main - -func main() { - println("hello wails") -} -` - err = os.WriteFile(filepath.Join(dir, "main.go"), []byte(mainGo), 0644) - require.NoError(t, err) - - // Create a minimal Taskfile.yml - taskfile := `version: '3' -tasks: - build: - cmds: - - mkdir -p {{.OUTPUT_DIR}}/{{.GOOS}}_{{.GOARCH}} - - touch {{.OUTPUT_DIR}}/{{.GOOS}}_{{.GOARCH}}/testapp -` - err = os.WriteFile(filepath.Join(dir, "Taskfile.yml"), []byte(taskfile), 0644) - require.NoError(t, err) - - return dir -} - -// setupWailsTestProjectWithFrontend creates a Wails project with frontend directory. -func setupWailsTestProjectWithFrontend(t *testing.T, lockFile string) string { - t.Helper() - dir := setupWailsTestProject(t) - - // Create frontend directory - frontendDir := filepath.Join(dir, "frontend") - err := os.MkdirAll(frontendDir, 0755) - require.NoError(t, err) - - // Create package.json - packageJSON := `{ - "name": "frontend", - "scripts": { - "build": "echo building frontend" - } -}` - err = os.WriteFile(filepath.Join(frontendDir, "package.json"), []byte(packageJSON), 0644) - require.NoError(t, err) - - // Create lock file if specified - if lockFile != "" { - err = os.WriteFile(filepath.Join(frontendDir, lockFile), []byte(""), 0644) - require.NoError(t, err) - } - - return dir -} - -// setupWailsV2TestProject creates a Wails v2 project structure. 
-func setupWailsV2TestProject(t *testing.T) string { - t.Helper() - dir := t.TempDir() - - // wails.json - err := os.WriteFile(filepath.Join(dir, "wails.json"), []byte("{}"), 0644) - require.NoError(t, err) - - // go.mod with v2 - goMod := `module testapp -go 1.21 -require github.com/wailsapp/wails/v2 v2.8.0 -` - err = os.WriteFile(filepath.Join(dir, "go.mod"), []byte(goMod), 0644) - require.NoError(t, err) - - return dir -} - -func TestWailsBuilder_Build_Taskfile_Good(t *testing.T) { - if testing.Short() { - t.Skip("skipping integration test in short mode") - } - - // Check if task is available - if _, err := exec.LookPath("task"); err != nil { - t.Skip("task not installed, skipping test") - } - - t.Run("delegates to Taskfile if present", func(t *testing.T) { - projectDir := setupWailsTestProject(t) - outputDir := t.TempDir() - - // Create a Taskfile that just touches a file - taskfile := `version: '3' -tasks: - build: - cmds: - - mkdir -p {{.OUTPUT_DIR}}/{{.GOOS}}_{{.GOARCH}} - - touch {{.OUTPUT_DIR}}/{{.GOOS}}_{{.GOARCH}}/testapp -` - err := os.WriteFile(filepath.Join(projectDir, "Taskfile.yml"), []byte(taskfile), 0644) - require.NoError(t, err) - - builder := NewWailsBuilder() - cfg := &build.Config{ - ProjectDir: projectDir, - OutputDir: outputDir, - Name: "testapp", - } - targets := []build.Target{ - {OS: runtime.GOOS, Arch: runtime.GOARCH}, - } - - artifacts, err := builder.Build(context.Background(), cfg, targets) - require.NoError(t, err) - assert.NotEmpty(t, artifacts) - }) -} - -func TestWailsBuilder_Name_Good(t *testing.T) { - builder := NewWailsBuilder() - assert.Equal(t, "wails", builder.Name()) -} - -func TestWailsBuilder_Build_V2_Good(t *testing.T) { - if testing.Short() { - t.Skip("skipping integration test in short mode") - } - - if _, err := exec.LookPath("wails"); err != nil { - t.Skip("wails not installed, skipping integration test") - } - - t.Run("builds v2 project", func(t *testing.T) { - projectDir := setupWailsV2TestProject(t) - outputDir 
:= t.TempDir() - - builder := NewWailsBuilder() - cfg := &build.Config{ - ProjectDir: projectDir, - OutputDir: outputDir, - Name: "testapp", - } - targets := []build.Target{ - {OS: runtime.GOOS, Arch: runtime.GOARCH}, - } - - // This will likely fail in a real run because we can't easily mock the full wails v2 build process - // (which needs a valid project with main.go etc). - // But it validates we are trying to run the command. - // For now, we expect an error but check it's the *right* error (from wails CLI) - _, err := builder.Build(context.Background(), cfg, targets) - if err != nil { - // If it fails, it should be because wails build failed, not because logic was wrong - // assert.Contains(t, err.Error(), "wails build failed") - } - }) -} - -func TestWailsBuilder_Detect_Good(t *testing.T) { - t.Run("detects Wails project with wails.json", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "wails.json"), []byte("{}"), 0644) - require.NoError(t, err) - - builder := NewWailsBuilder() - detected, err := builder.Detect(dir) - assert.NoError(t, err) - assert.True(t, detected) - }) - - t.Run("returns false for Go-only project", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module test"), 0644) - require.NoError(t, err) - - builder := NewWailsBuilder() - detected, err := builder.Detect(dir) - assert.NoError(t, err) - assert.False(t, detected) - }) - - t.Run("returns false for Node.js project", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "package.json"), []byte("{}"), 0644) - require.NoError(t, err) - - builder := NewWailsBuilder() - detected, err := builder.Detect(dir) - assert.NoError(t, err) - assert.False(t, detected) - }) - - t.Run("returns false for empty directory", func(t *testing.T) { - dir := t.TempDir() - - builder := NewWailsBuilder() - detected, err := builder.Detect(dir) - assert.NoError(t, err) - assert.False(t, detected) - }) 
-} - -func TestDetectPackageManager_Good(t *testing.T) { - t.Run("detects bun from bun.lockb", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "bun.lockb"), []byte(""), 0644) - require.NoError(t, err) - - result := detectPackageManager(dir) - assert.Equal(t, "bun", result) - }) - - t.Run("detects pnpm from pnpm-lock.yaml", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "pnpm-lock.yaml"), []byte(""), 0644) - require.NoError(t, err) - - result := detectPackageManager(dir) - assert.Equal(t, "pnpm", result) - }) - - t.Run("detects yarn from yarn.lock", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "yarn.lock"), []byte(""), 0644) - require.NoError(t, err) - - result := detectPackageManager(dir) - assert.Equal(t, "yarn", result) - }) - - t.Run("detects npm from package-lock.json", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "package-lock.json"), []byte(""), 0644) - require.NoError(t, err) - - result := detectPackageManager(dir) - assert.Equal(t, "npm", result) - }) - - t.Run("defaults to npm when no lock file", func(t *testing.T) { - dir := t.TempDir() - - result := detectPackageManager(dir) - assert.Equal(t, "npm", result) - }) - - t.Run("prefers bun over other lock files", func(t *testing.T) { - dir := t.TempDir() - // Create multiple lock files - require.NoError(t, os.WriteFile(filepath.Join(dir, "bun.lockb"), []byte(""), 0644)) - require.NoError(t, os.WriteFile(filepath.Join(dir, "yarn.lock"), []byte(""), 0644)) - require.NoError(t, os.WriteFile(filepath.Join(dir, "package-lock.json"), []byte(""), 0644)) - - result := detectPackageManager(dir) - assert.Equal(t, "bun", result) - }) - - t.Run("prefers pnpm over yarn and npm", func(t *testing.T) { - dir := t.TempDir() - // Create multiple lock files (no bun) - require.NoError(t, os.WriteFile(filepath.Join(dir, "pnpm-lock.yaml"), []byte(""), 0644)) - require.NoError(t, 
os.WriteFile(filepath.Join(dir, "yarn.lock"), []byte(""), 0644)) - require.NoError(t, os.WriteFile(filepath.Join(dir, "package-lock.json"), []byte(""), 0644)) - - result := detectPackageManager(dir) - assert.Equal(t, "pnpm", result) - }) - - t.Run("prefers yarn over npm", func(t *testing.T) { - dir := t.TempDir() - // Create multiple lock files (no bun or pnpm) - require.NoError(t, os.WriteFile(filepath.Join(dir, "yarn.lock"), []byte(""), 0644)) - require.NoError(t, os.WriteFile(filepath.Join(dir, "package-lock.json"), []byte(""), 0644)) - - result := detectPackageManager(dir) - assert.Equal(t, "yarn", result) - }) -} - -func TestWailsBuilder_Build_Bad(t *testing.T) { - t.Run("returns error for nil config", func(t *testing.T) { - builder := NewWailsBuilder() - - artifacts, err := builder.Build(context.Background(), nil, []build.Target{{OS: "linux", Arch: "amd64"}}) - assert.Error(t, err) - assert.Nil(t, artifacts) - assert.Contains(t, err.Error(), "config is nil") - }) - - t.Run("returns error for empty targets", func(t *testing.T) { - projectDir := setupWailsTestProject(t) - - builder := NewWailsBuilder() - cfg := &build.Config{ - ProjectDir: projectDir, - OutputDir: t.TempDir(), - Name: "test", - } - - artifacts, err := builder.Build(context.Background(), cfg, []build.Target{}) - assert.Error(t, err) - assert.Nil(t, artifacts) - assert.Contains(t, err.Error(), "no targets specified") - }) -} - -func TestWailsBuilder_Build_Good(t *testing.T) { - if testing.Short() { - t.Skip("skipping integration test in short mode") - } - - // Check if wails3 is available in PATH - if _, err := exec.LookPath("wails3"); err != nil { - t.Skip("wails3 not installed, skipping integration test") - } - - t.Run("builds for current platform", func(t *testing.T) { - projectDir := setupWailsTestProject(t) - outputDir := t.TempDir() - - builder := NewWailsBuilder() - cfg := &build.Config{ - ProjectDir: projectDir, - OutputDir: outputDir, - Name: "testapp", - } - targets := []build.Target{ - 
{OS: runtime.GOOS, Arch: runtime.GOARCH}, - } - - artifacts, err := builder.Build(context.Background(), cfg, targets) - require.NoError(t, err) - require.Len(t, artifacts, 1) - - // Verify artifact properties - artifact := artifacts[0] - assert.Equal(t, runtime.GOOS, artifact.OS) - assert.Equal(t, runtime.GOARCH, artifact.Arch) - }) -} - -func TestWailsBuilder_Interface_Good(t *testing.T) { - // Verify WailsBuilder implements Builder interface - var _ build.Builder = (*WailsBuilder)(nil) - var _ build.Builder = NewWailsBuilder() -} - -func TestWailsBuilder_Ugly(t *testing.T) { - t.Run("handles nonexistent frontend directory gracefully", func(t *testing.T) { - if testing.Short() { - t.Skip("skipping integration test in short mode") - } - - // Create a Wails project without a frontend directory - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "wails.json"), []byte("{}"), 0644) - require.NoError(t, err) - - builder := NewWailsBuilder() - cfg := &build.Config{ - ProjectDir: dir, - OutputDir: t.TempDir(), - Name: "test", - } - targets := []build.Target{ - {OS: runtime.GOOS, Arch: runtime.GOARCH}, - } - - // This will fail because wails3 isn't set up, but it shouldn't panic - // due to missing frontend directory - _, err = builder.Build(context.Background(), cfg, targets) - // We expect an error (wails3 build will fail), but not a panic - // The error should be about wails3 build, not about frontend - if err != nil { - assert.NotContains(t, err.Error(), "frontend dependencies") - } - }) - - t.Run("handles context cancellation", func(t *testing.T) { - if testing.Short() { - t.Skip("skipping integration test in short mode") - } - - projectDir := setupWailsTestProject(t) - - builder := NewWailsBuilder() - cfg := &build.Config{ - ProjectDir: projectDir, - OutputDir: t.TempDir(), - Name: "canceltest", - } - targets := []build.Target{ - {OS: runtime.GOOS, Arch: runtime.GOARCH}, - } - - // Create an already cancelled context - ctx, cancel := 
context.WithCancel(context.Background()) - cancel() - - artifacts, err := builder.Build(ctx, cfg, targets) - assert.Error(t, err) - assert.Empty(t, artifacts) - }) -} diff --git a/pkg/build/checksum.go b/pkg/build/checksum.go deleted file mode 100644 index 926ac45..0000000 --- a/pkg/build/checksum.go +++ /dev/null @@ -1,102 +0,0 @@ -// Package build provides project type detection and cross-compilation for the Core build system. -package build - -import ( - "crypto/sha256" - "encoding/hex" - "fmt" - "io" - "os" - "path/filepath" - "sort" - "strings" -) - -// Checksum computes SHA256 for an artifact and returns the artifact with the Checksum field filled. -func Checksum(artifact Artifact) (Artifact, error) { - if artifact.Path == "" { - return Artifact{}, fmt.Errorf("build.Checksum: artifact path is empty") - } - - // Open the file - file, err := os.Open(artifact.Path) - if err != nil { - return Artifact{}, fmt.Errorf("build.Checksum: failed to open file: %w", err) - } - defer file.Close() - - // Compute SHA256 hash - hasher := sha256.New() - if _, err := io.Copy(hasher, file); err != nil { - return Artifact{}, fmt.Errorf("build.Checksum: failed to hash file: %w", err) - } - - checksum := hex.EncodeToString(hasher.Sum(nil)) - - return Artifact{ - Path: artifact.Path, - OS: artifact.OS, - Arch: artifact.Arch, - Checksum: checksum, - }, nil -} - -// ChecksumAll computes checksums for all artifacts. -// Returns a slice of artifacts with their Checksum fields filled. 
-func ChecksumAll(artifacts []Artifact) ([]Artifact, error) { - if len(artifacts) == 0 { - return nil, nil - } - - var checksummed []Artifact - for _, artifact := range artifacts { - cs, err := Checksum(artifact) - if err != nil { - return checksummed, fmt.Errorf("build.ChecksumAll: failed to checksum %s: %w", artifact.Path, err) - } - checksummed = append(checksummed, cs) - } - - return checksummed, nil -} - -// WriteChecksumFile writes a CHECKSUMS.txt file with the format: -// -// sha256hash filename1 -// sha256hash filename2 -// -// The artifacts should have their Checksum fields filled (call ChecksumAll first). -// Filenames are relative to the output directory (just the basename). -func WriteChecksumFile(artifacts []Artifact, path string) error { - if len(artifacts) == 0 { - return nil - } - - // Build the content - var lines []string - for _, artifact := range artifacts { - if artifact.Checksum == "" { - return fmt.Errorf("build.WriteChecksumFile: artifact %s has no checksum", artifact.Path) - } - filename := filepath.Base(artifact.Path) - lines = append(lines, fmt.Sprintf("%s %s", artifact.Checksum, filename)) - } - - // Sort lines for consistent output - sort.Strings(lines) - - content := strings.Join(lines, "\n") + "\n" - - // Ensure directory exists - dir := filepath.Dir(path) - if err := os.MkdirAll(dir, 0755); err != nil { - return fmt.Errorf("build.WriteChecksumFile: failed to create directory: %w", err) - } - - // Write the file - if err := os.WriteFile(path, []byte(content), 0644); err != nil { - return fmt.Errorf("build.WriteChecksumFile: failed to write file: %w", err) - } - - return nil -} diff --git a/pkg/build/checksum_test.go b/pkg/build/checksum_test.go deleted file mode 100644 index 499c67d..0000000 --- a/pkg/build/checksum_test.go +++ /dev/null @@ -1,275 +0,0 @@ -package build - -import ( - "os" - "path/filepath" - "strings" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// 
setupChecksumTestFile creates a test file with known content. -func setupChecksumTestFile(t *testing.T, content string) string { - t.Helper() - - dir := t.TempDir() - path := filepath.Join(dir, "testfile") - err := os.WriteFile(path, []byte(content), 0644) - require.NoError(t, err) - - return path -} - -func TestChecksum_Good(t *testing.T) { - t.Run("computes SHA256 checksum", func(t *testing.T) { - // Known SHA256 of "Hello, World!\n" - path := setupChecksumTestFile(t, "Hello, World!\n") - expectedChecksum := "c98c24b677eff44860afea6f493bbaec5bb1c4cbb209c6fc2bbb47f66ff2ad31" - - artifact := Artifact{ - Path: path, - OS: "linux", - Arch: "amd64", - } - - result, err := Checksum(artifact) - require.NoError(t, err) - assert.Equal(t, expectedChecksum, result.Checksum) - }) - - t.Run("preserves artifact fields", func(t *testing.T) { - path := setupChecksumTestFile(t, "test content") - - artifact := Artifact{ - Path: path, - OS: "darwin", - Arch: "arm64", - } - - result, err := Checksum(artifact) - require.NoError(t, err) - - assert.Equal(t, path, result.Path) - assert.Equal(t, "darwin", result.OS) - assert.Equal(t, "arm64", result.Arch) - assert.NotEmpty(t, result.Checksum) - }) - - t.Run("produces 64 character hex string", func(t *testing.T) { - path := setupChecksumTestFile(t, "any content") - - artifact := Artifact{Path: path, OS: "linux", Arch: "amd64"} - - result, err := Checksum(artifact) - require.NoError(t, err) - - // SHA256 produces 32 bytes = 64 hex characters - assert.Len(t, result.Checksum, 64) - }) - - t.Run("different content produces different checksums", func(t *testing.T) { - path1 := setupChecksumTestFile(t, "content one") - path2 := setupChecksumTestFile(t, "content two") - - result1, err := Checksum(Artifact{Path: path1, OS: "linux", Arch: "amd64"}) - require.NoError(t, err) - - result2, err := Checksum(Artifact{Path: path2, OS: "linux", Arch: "amd64"}) - require.NoError(t, err) - - assert.NotEqual(t, result1.Checksum, result2.Checksum) - }) - - 
t.Run("same content produces same checksum", func(t *testing.T) { - content := "identical content" - path1 := setupChecksumTestFile(t, content) - path2 := setupChecksumTestFile(t, content) - - result1, err := Checksum(Artifact{Path: path1, OS: "linux", Arch: "amd64"}) - require.NoError(t, err) - - result2, err := Checksum(Artifact{Path: path2, OS: "linux", Arch: "amd64"}) - require.NoError(t, err) - - assert.Equal(t, result1.Checksum, result2.Checksum) - }) -} - -func TestChecksum_Bad(t *testing.T) { - t.Run("returns error for empty path", func(t *testing.T) { - artifact := Artifact{ - Path: "", - OS: "linux", - Arch: "amd64", - } - - result, err := Checksum(artifact) - assert.Error(t, err) - assert.Contains(t, err.Error(), "artifact path is empty") - assert.Empty(t, result.Checksum) - }) - - t.Run("returns error for non-existent file", func(t *testing.T) { - artifact := Artifact{ - Path: "/nonexistent/path/file", - OS: "linux", - Arch: "amd64", - } - - result, err := Checksum(artifact) - assert.Error(t, err) - assert.Contains(t, err.Error(), "failed to open file") - assert.Empty(t, result.Checksum) - }) -} - -func TestChecksumAll_Good(t *testing.T) { - t.Run("checksums multiple artifacts", func(t *testing.T) { - paths := []string{ - setupChecksumTestFile(t, "content one"), - setupChecksumTestFile(t, "content two"), - setupChecksumTestFile(t, "content three"), - } - - artifacts := []Artifact{ - {Path: paths[0], OS: "linux", Arch: "amd64"}, - {Path: paths[1], OS: "darwin", Arch: "arm64"}, - {Path: paths[2], OS: "windows", Arch: "amd64"}, - } - - results, err := ChecksumAll(artifacts) - require.NoError(t, err) - require.Len(t, results, 3) - - for i, result := range results { - assert.Equal(t, artifacts[i].Path, result.Path) - assert.Equal(t, artifacts[i].OS, result.OS) - assert.Equal(t, artifacts[i].Arch, result.Arch) - assert.NotEmpty(t, result.Checksum) - } - }) - - t.Run("returns nil for empty slice", func(t *testing.T) { - results, err := 
ChecksumAll([]Artifact{}) - assert.NoError(t, err) - assert.Nil(t, results) - }) - - t.Run("returns nil for nil slice", func(t *testing.T) { - results, err := ChecksumAll(nil) - assert.NoError(t, err) - assert.Nil(t, results) - }) -} - -func TestChecksumAll_Bad(t *testing.T) { - t.Run("returns partial results on error", func(t *testing.T) { - path := setupChecksumTestFile(t, "valid content") - - artifacts := []Artifact{ - {Path: path, OS: "linux", Arch: "amd64"}, - {Path: "/nonexistent/file", OS: "linux", Arch: "arm64"}, // This will fail - } - - results, err := ChecksumAll(artifacts) - assert.Error(t, err) - // Should have the first successful result - assert.Len(t, results, 1) - assert.NotEmpty(t, results[0].Checksum) - }) -} - -func TestWriteChecksumFile_Good(t *testing.T) { - t.Run("writes checksum file with correct format", func(t *testing.T) { - dir := t.TempDir() - checksumPath := filepath.Join(dir, "CHECKSUMS.txt") - - artifacts := []Artifact{ - {Path: "/output/app_linux_amd64.tar.gz", Checksum: "abc123def456", OS: "linux", Arch: "amd64"}, - {Path: "/output/app_darwin_arm64.tar.gz", Checksum: "789xyz000111", OS: "darwin", Arch: "arm64"}, - } - - err := WriteChecksumFile(artifacts, checksumPath) - require.NoError(t, err) - - // Read and verify content - content, err := os.ReadFile(checksumPath) - require.NoError(t, err) - - lines := strings.Split(strings.TrimSpace(string(content)), "\n") - require.Len(t, lines, 2) - - // Lines should be sorted alphabetically - assert.Equal(t, "789xyz000111 app_darwin_arm64.tar.gz", lines[0]) - assert.Equal(t, "abc123def456 app_linux_amd64.tar.gz", lines[1]) - }) - - t.Run("creates parent directories", func(t *testing.T) { - dir := t.TempDir() - checksumPath := filepath.Join(dir, "nested", "deep", "CHECKSUMS.txt") - - artifacts := []Artifact{ - {Path: "/output/app.tar.gz", Checksum: "abc123", OS: "linux", Arch: "amd64"}, - } - - err := WriteChecksumFile(artifacts, checksumPath) - require.NoError(t, err) - assert.FileExists(t, 
checksumPath) - }) - - t.Run("does nothing for empty artifacts", func(t *testing.T) { - dir := t.TempDir() - checksumPath := filepath.Join(dir, "CHECKSUMS.txt") - - err := WriteChecksumFile([]Artifact{}, checksumPath) - require.NoError(t, err) - - // File should not exist - _, err = os.Stat(checksumPath) - assert.True(t, os.IsNotExist(err)) - }) - - t.Run("does nothing for nil artifacts", func(t *testing.T) { - dir := t.TempDir() - checksumPath := filepath.Join(dir, "CHECKSUMS.txt") - - err := WriteChecksumFile(nil, checksumPath) - require.NoError(t, err) - }) - - t.Run("uses only basename for filenames", func(t *testing.T) { - dir := t.TempDir() - checksumPath := filepath.Join(dir, "CHECKSUMS.txt") - - artifacts := []Artifact{ - {Path: "/some/deep/nested/path/myapp_linux_amd64.tar.gz", Checksum: "checksum123", OS: "linux", Arch: "amd64"}, - } - - err := WriteChecksumFile(artifacts, checksumPath) - require.NoError(t, err) - - content, err := os.ReadFile(checksumPath) - require.NoError(t, err) - - // Should only contain the basename - assert.Contains(t, string(content), "myapp_linux_amd64.tar.gz") - assert.NotContains(t, string(content), "/some/deep/nested/path/") - }) -} - -func TestWriteChecksumFile_Bad(t *testing.T) { - t.Run("returns error for artifact without checksum", func(t *testing.T) { - dir := t.TempDir() - checksumPath := filepath.Join(dir, "CHECKSUMS.txt") - - artifacts := []Artifact{ - {Path: "/output/app.tar.gz", Checksum: "", OS: "linux", Arch: "amd64"}, // No checksum - } - - err := WriteChecksumFile(artifacts, checksumPath) - assert.Error(t, err) - assert.Contains(t, err.Error(), "has no checksum") - }) -} diff --git a/pkg/build/config.go b/pkg/build/config.go deleted file mode 100644 index 5c6fb41..0000000 --- a/pkg/build/config.go +++ /dev/null @@ -1,168 +0,0 @@ -// Package build provides project type detection and cross-compilation for the Core build system. -// This file handles configuration loading from .core/build.yaml files. 
-package build - -import ( - "fmt" - "os" - "path/filepath" - - "github.com/host-uk/core/pkg/build/signing" - "gopkg.in/yaml.v3" -) - -// ConfigFileName is the name of the build configuration file. -const ConfigFileName = "build.yaml" - -// ConfigDir is the directory where build configuration is stored. -const ConfigDir = ".core" - -// BuildConfig holds the complete build configuration loaded from .core/build.yaml. -// This is distinct from Config which holds runtime build parameters. -type BuildConfig struct { - // Version is the config file format version. - Version int `yaml:"version"` - // Project contains project metadata. - Project Project `yaml:"project"` - // Build contains build settings. - Build Build `yaml:"build"` - // Targets defines the build targets. - Targets []TargetConfig `yaml:"targets"` - // Sign contains code signing configuration. - Sign signing.SignConfig `yaml:"sign,omitempty"` -} - -// Project holds project metadata. -type Project struct { - // Name is the project name. - Name string `yaml:"name"` - // Description is a brief description of the project. - Description string `yaml:"description"` - // Main is the path to the main package (e.g., ./cmd/core). - Main string `yaml:"main"` - // Binary is the output binary name. - Binary string `yaml:"binary"` -} - -// Build holds build-time settings. -type Build struct { - // CGO enables CGO for the build. - CGO bool `yaml:"cgo"` - // Flags are additional build flags (e.g., ["-trimpath"]). - Flags []string `yaml:"flags"` - // LDFlags are linker flags (e.g., ["-s", "-w"]). - LDFlags []string `yaml:"ldflags"` - // Env are additional environment variables. - Env []string `yaml:"env"` -} - -// TargetConfig defines a build target in the config file. -// This is separate from Target to allow for additional config-specific fields. -type TargetConfig struct { - // OS is the target operating system (e.g., "linux", "darwin", "windows"). 
- OS string `yaml:"os"` - // Arch is the target architecture (e.g., "amd64", "arm64"). - Arch string `yaml:"arch"` -} - -// LoadConfig loads build configuration from the .core/build.yaml file in the given directory. -// If the config file does not exist, it returns DefaultConfig(). -// Returns an error if the file exists but cannot be parsed. -func LoadConfig(dir string) (*BuildConfig, error) { - configPath := filepath.Join(dir, ConfigDir, ConfigFileName) - - data, err := os.ReadFile(configPath) - if err != nil { - if os.IsNotExist(err) { - return DefaultConfig(), nil - } - return nil, fmt.Errorf("build.LoadConfig: failed to read config file: %w", err) - } - - var cfg BuildConfig - if err := yaml.Unmarshal(data, &cfg); err != nil { - return nil, fmt.Errorf("build.LoadConfig: failed to parse config file: %w", err) - } - - // Apply defaults for any missing fields - applyDefaults(&cfg) - - return &cfg, nil -} - -// DefaultConfig returns sensible defaults for Go projects. -func DefaultConfig() *BuildConfig { - return &BuildConfig{ - Version: 1, - Project: Project{ - Name: "", - Main: ".", - Binary: "", - }, - Build: Build{ - CGO: false, - Flags: []string{"-trimpath"}, - LDFlags: []string{"-s", "-w"}, - Env: []string{}, - }, - Targets: []TargetConfig{ - {OS: "linux", Arch: "amd64"}, - {OS: "linux", Arch: "arm64"}, - {OS: "darwin", Arch: "amd64"}, - {OS: "darwin", Arch: "arm64"}, - {OS: "windows", Arch: "amd64"}, - }, - Sign: signing.DefaultSignConfig(), - } -} - -// applyDefaults fills in default values for any empty fields in the config. 
-func applyDefaults(cfg *BuildConfig) { - defaults := DefaultConfig() - - if cfg.Version == 0 { - cfg.Version = defaults.Version - } - - if cfg.Project.Main == "" { - cfg.Project.Main = defaults.Project.Main - } - - if cfg.Build.Flags == nil { - cfg.Build.Flags = defaults.Build.Flags - } - - if cfg.Build.LDFlags == nil { - cfg.Build.LDFlags = defaults.Build.LDFlags - } - - if cfg.Build.Env == nil { - cfg.Build.Env = defaults.Build.Env - } - - if len(cfg.Targets) == 0 { - cfg.Targets = defaults.Targets - } - - // Expand environment variables in sign config - cfg.Sign.ExpandEnv() -} - -// ConfigPath returns the path to the build config file for a given directory. -func ConfigPath(dir string) string { - return filepath.Join(dir, ConfigDir, ConfigFileName) -} - -// ConfigExists checks if a build config file exists in the given directory. -func ConfigExists(dir string) bool { - return fileExists(ConfigPath(dir)) -} - -// ToTargets converts TargetConfig slice to Target slice for use with builders. -func (cfg *BuildConfig) ToTargets() []Target { - targets := make([]Target, len(cfg.Targets)) - for i, t := range cfg.Targets { - targets[i] = Target{OS: t.OS, Arch: t.Arch} - } - return targets -} diff --git a/pkg/build/config_test.go b/pkg/build/config_test.go deleted file mode 100644 index 2723ce7..0000000 --- a/pkg/build/config_test.go +++ /dev/null @@ -1,316 +0,0 @@ -package build - -import ( - "os" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// setupConfigTestDir creates a temp directory with optional .core/build.yaml content. 
-func setupConfigTestDir(t *testing.T, configContent string) string { - t.Helper() - dir := t.TempDir() - - if configContent != "" { - coreDir := filepath.Join(dir, ConfigDir) - err := os.MkdirAll(coreDir, 0755) - require.NoError(t, err) - - configPath := filepath.Join(coreDir, ConfigFileName) - err = os.WriteFile(configPath, []byte(configContent), 0644) - require.NoError(t, err) - } - - return dir -} - -func TestLoadConfig_Good(t *testing.T) { - t.Run("loads valid config", func(t *testing.T) { - content := ` -version: 1 -project: - name: myapp - description: A test application - main: ./cmd/myapp - binary: myapp -build: - cgo: true - flags: - - -trimpath - - -race - ldflags: - - -s - - -w - env: - - FOO=bar -targets: - - os: linux - arch: amd64 - - os: darwin - arch: arm64 -` - dir := setupConfigTestDir(t, content) - - cfg, err := LoadConfig(dir) - require.NoError(t, err) - require.NotNil(t, cfg) - - assert.Equal(t, 1, cfg.Version) - assert.Equal(t, "myapp", cfg.Project.Name) - assert.Equal(t, "A test application", cfg.Project.Description) - assert.Equal(t, "./cmd/myapp", cfg.Project.Main) - assert.Equal(t, "myapp", cfg.Project.Binary) - assert.True(t, cfg.Build.CGO) - assert.Equal(t, []string{"-trimpath", "-race"}, cfg.Build.Flags) - assert.Equal(t, []string{"-s", "-w"}, cfg.Build.LDFlags) - assert.Equal(t, []string{"FOO=bar"}, cfg.Build.Env) - assert.Len(t, cfg.Targets, 2) - assert.Equal(t, "linux", cfg.Targets[0].OS) - assert.Equal(t, "amd64", cfg.Targets[0].Arch) - assert.Equal(t, "darwin", cfg.Targets[1].OS) - assert.Equal(t, "arm64", cfg.Targets[1].Arch) - }) - - t.Run("returns defaults when config file missing", func(t *testing.T) { - dir := t.TempDir() - - cfg, err := LoadConfig(dir) - require.NoError(t, err) - require.NotNil(t, cfg) - - defaults := DefaultConfig() - assert.Equal(t, defaults.Version, cfg.Version) - assert.Equal(t, defaults.Project.Main, cfg.Project.Main) - assert.Equal(t, defaults.Build.CGO, cfg.Build.CGO) - assert.Equal(t, 
defaults.Build.Flags, cfg.Build.Flags) - assert.Equal(t, defaults.Build.LDFlags, cfg.Build.LDFlags) - assert.Equal(t, defaults.Targets, cfg.Targets) - }) - - t.Run("applies defaults for missing fields", func(t *testing.T) { - content := ` -version: 2 -project: - name: partial -` - dir := setupConfigTestDir(t, content) - - cfg, err := LoadConfig(dir) - require.NoError(t, err) - require.NotNil(t, cfg) - - // Explicit values preserved - assert.Equal(t, 2, cfg.Version) - assert.Equal(t, "partial", cfg.Project.Name) - - // Defaults applied - defaults := DefaultConfig() - assert.Equal(t, defaults.Project.Main, cfg.Project.Main) - assert.Equal(t, defaults.Build.Flags, cfg.Build.Flags) - assert.Equal(t, defaults.Build.LDFlags, cfg.Build.LDFlags) - assert.Equal(t, defaults.Targets, cfg.Targets) - }) - - t.Run("preserves empty arrays when explicitly set", func(t *testing.T) { - content := ` -version: 1 -project: - name: noflags -build: - flags: [] - ldflags: [] -targets: - - os: linux - arch: amd64 -` - dir := setupConfigTestDir(t, content) - - cfg, err := LoadConfig(dir) - require.NoError(t, err) - require.NotNil(t, cfg) - - // Empty arrays are preserved (not replaced with defaults) - assert.Empty(t, cfg.Build.Flags) - assert.Empty(t, cfg.Build.LDFlags) - // Targets explicitly set - assert.Len(t, cfg.Targets, 1) - }) -} - -func TestLoadConfig_Bad(t *testing.T) { - t.Run("returns error for invalid YAML", func(t *testing.T) { - content := ` -version: 1 -project: - name: [invalid yaml -` - dir := setupConfigTestDir(t, content) - - cfg, err := LoadConfig(dir) - assert.Error(t, err) - assert.Nil(t, cfg) - assert.Contains(t, err.Error(), "failed to parse config file") - }) - - t.Run("returns error for unreadable file", func(t *testing.T) { - dir := t.TempDir() - coreDir := filepath.Join(dir, ConfigDir) - err := os.MkdirAll(coreDir, 0755) - require.NoError(t, err) - - // Create config as a directory instead of file - configPath := filepath.Join(coreDir, ConfigFileName) - err = 
os.Mkdir(configPath, 0755) - require.NoError(t, err) - - cfg, err := LoadConfig(dir) - assert.Error(t, err) - assert.Nil(t, cfg) - assert.Contains(t, err.Error(), "failed to read config file") - }) -} - -func TestDefaultConfig_Good(t *testing.T) { - t.Run("returns sensible defaults", func(t *testing.T) { - cfg := DefaultConfig() - - assert.Equal(t, 1, cfg.Version) - assert.Equal(t, ".", cfg.Project.Main) - assert.Empty(t, cfg.Project.Name) - assert.Empty(t, cfg.Project.Binary) - assert.False(t, cfg.Build.CGO) - assert.Contains(t, cfg.Build.Flags, "-trimpath") - assert.Contains(t, cfg.Build.LDFlags, "-s") - assert.Contains(t, cfg.Build.LDFlags, "-w") - assert.Empty(t, cfg.Build.Env) - - // Default targets cover common platforms - assert.Len(t, cfg.Targets, 5) - hasLinuxAmd64 := false - hasDarwinArm64 := false - hasWindowsAmd64 := false - for _, t := range cfg.Targets { - if t.OS == "linux" && t.Arch == "amd64" { - hasLinuxAmd64 = true - } - if t.OS == "darwin" && t.Arch == "arm64" { - hasDarwinArm64 = true - } - if t.OS == "windows" && t.Arch == "amd64" { - hasWindowsAmd64 = true - } - } - assert.True(t, hasLinuxAmd64) - assert.True(t, hasDarwinArm64) - assert.True(t, hasWindowsAmd64) - }) -} - -func TestConfigPath_Good(t *testing.T) { - t.Run("returns correct path", func(t *testing.T) { - path := ConfigPath("/project/root") - assert.Equal(t, "/project/root/.core/build.yaml", path) - }) -} - -func TestConfigExists_Good(t *testing.T) { - t.Run("returns true when config exists", func(t *testing.T) { - dir := setupConfigTestDir(t, "version: 1") - assert.True(t, ConfigExists(dir)) - }) - - t.Run("returns false when config missing", func(t *testing.T) { - dir := t.TempDir() - assert.False(t, ConfigExists(dir)) - }) - - t.Run("returns false when .core dir missing", func(t *testing.T) { - dir := t.TempDir() - assert.False(t, ConfigExists(dir)) - }) -} - -func TestLoadConfig_Good_SignConfig(t *testing.T) { - tmpDir := t.TempDir() - coreDir := filepath.Join(tmpDir, ".core") 
- os.MkdirAll(coreDir, 0755) - - configContent := `version: 1 -sign: - enabled: true - gpg: - key: "ABCD1234" - macos: - identity: "Developer ID Application: Test" - notarize: true -` - os.WriteFile(filepath.Join(coreDir, "build.yaml"), []byte(configContent), 0644) - - cfg, err := LoadConfig(tmpDir) - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - - if !cfg.Sign.Enabled { - t.Error("expected Sign.Enabled to be true") - } - if cfg.Sign.GPG.Key != "ABCD1234" { - t.Errorf("expected GPG.Key 'ABCD1234', got %q", cfg.Sign.GPG.Key) - } - if cfg.Sign.MacOS.Identity != "Developer ID Application: Test" { - t.Errorf("expected MacOS.Identity, got %q", cfg.Sign.MacOS.Identity) - } - if !cfg.Sign.MacOS.Notarize { - t.Error("expected MacOS.Notarize to be true") - } -} - -func TestBuildConfig_ToTargets_Good(t *testing.T) { - t.Run("converts TargetConfig to Target", func(t *testing.T) { - cfg := &BuildConfig{ - Targets: []TargetConfig{ - {OS: "linux", Arch: "amd64"}, - {OS: "darwin", Arch: "arm64"}, - {OS: "windows", Arch: "386"}, - }, - } - - targets := cfg.ToTargets() - require.Len(t, targets, 3) - - assert.Equal(t, Target{OS: "linux", Arch: "amd64"}, targets[0]) - assert.Equal(t, Target{OS: "darwin", Arch: "arm64"}, targets[1]) - assert.Equal(t, Target{OS: "windows", Arch: "386"}, targets[2]) - }) - - t.Run("returns empty slice for no targets", func(t *testing.T) { - cfg := &BuildConfig{ - Targets: []TargetConfig{}, - } - - targets := cfg.ToTargets() - assert.Empty(t, targets) - }) -} - -// TestLoadConfig_Testdata tests loading from the testdata fixture. 
-func TestLoadConfig_Testdata(t *testing.T) { - t.Run("loads config-project fixture", func(t *testing.T) { - cfg, err := LoadConfig("testdata/config-project") - require.NoError(t, err) - require.NotNil(t, cfg) - - assert.Equal(t, 1, cfg.Version) - assert.Equal(t, "example-cli", cfg.Project.Name) - assert.Equal(t, "An example CLI application", cfg.Project.Description) - assert.Equal(t, "./cmd/example", cfg.Project.Main) - assert.Equal(t, "example", cfg.Project.Binary) - assert.False(t, cfg.Build.CGO) - assert.Equal(t, []string{"-trimpath"}, cfg.Build.Flags) - assert.Equal(t, []string{"-s", "-w"}, cfg.Build.LDFlags) - assert.Len(t, cfg.Targets, 3) - }) -} diff --git a/pkg/build/discovery.go b/pkg/build/discovery.go deleted file mode 100644 index ba90b4d..0000000 --- a/pkg/build/discovery.go +++ /dev/null @@ -1,92 +0,0 @@ -package build - -import ( - "os" - "path/filepath" - "slices" -) - -// Marker files for project type detection. -const ( - markerGoMod = "go.mod" - markerWails = "wails.json" - markerNodePackage = "package.json" - markerComposer = "composer.json" -) - -// projectMarker maps a marker file to its project type. -type projectMarker struct { - file string - projectType ProjectType -} - -// markers defines the detection order. More specific types come first. -// Wails projects have both wails.json and go.mod, so wails is checked first. -var markers = []projectMarker{ - {markerWails, ProjectTypeWails}, - {markerGoMod, ProjectTypeGo}, - {markerNodePackage, ProjectTypeNode}, - {markerComposer, ProjectTypePHP}, -} - -// Discover detects project types in the given directory by checking for marker files. -// Returns a slice of detected project types, ordered by priority (most specific first). -// For example, a Wails project returns [wails, go] since it has both wails.json and go.mod. 
-func Discover(dir string) ([]ProjectType, error) { - var detected []ProjectType - - for _, m := range markers { - path := filepath.Join(dir, m.file) - if fileExists(path) { - // Avoid duplicates (shouldn't happen with current markers, but defensive) - if !slices.Contains(detected, m.projectType) { - detected = append(detected, m.projectType) - } - } - } - - return detected, nil -} - -// PrimaryType returns the most specific project type detected in the directory. -// Returns empty string if no project type is detected. -func PrimaryType(dir string) (ProjectType, error) { - types, err := Discover(dir) - if err != nil { - return "", err - } - if len(types) == 0 { - return "", nil - } - return types[0], nil -} - -// IsGoProject checks if the directory contains a Go project (go.mod or wails.json). -func IsGoProject(dir string) bool { - return fileExists(filepath.Join(dir, markerGoMod)) || - fileExists(filepath.Join(dir, markerWails)) -} - -// IsWailsProject checks if the directory contains a Wails project. -func IsWailsProject(dir string) bool { - return fileExists(filepath.Join(dir, markerWails)) -} - -// IsNodeProject checks if the directory contains a Node.js project. -func IsNodeProject(dir string) bool { - return fileExists(filepath.Join(dir, markerNodePackage)) -} - -// IsPHPProject checks if the directory contains a PHP project. -func IsPHPProject(dir string) bool { - return fileExists(filepath.Join(dir, markerComposer)) -} - -// fileExists checks if a file exists and is not a directory. 
-func fileExists(path string) bool { - info, err := os.Stat(path) - if err != nil { - return false - } - return !info.IsDir() -} diff --git a/pkg/build/discovery_test.go b/pkg/build/discovery_test.go deleted file mode 100644 index dc1a1f9..0000000 --- a/pkg/build/discovery_test.go +++ /dev/null @@ -1,217 +0,0 @@ -package build - -import ( - "os" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// setupTestDir creates a temporary directory with the specified marker files. -func setupTestDir(t *testing.T, markers ...string) string { - t.Helper() - dir := t.TempDir() - for _, m := range markers { - path := filepath.Join(dir, m) - err := os.WriteFile(path, []byte("{}"), 0644) - require.NoError(t, err) - } - return dir -} - -func TestDiscover_Good(t *testing.T) { - t.Run("detects Go project", func(t *testing.T) { - dir := setupTestDir(t, "go.mod") - types, err := Discover(dir) - assert.NoError(t, err) - assert.Equal(t, []ProjectType{ProjectTypeGo}, types) - }) - - t.Run("detects Wails project with priority over Go", func(t *testing.T) { - dir := setupTestDir(t, "wails.json", "go.mod") - types, err := Discover(dir) - assert.NoError(t, err) - assert.Equal(t, []ProjectType{ProjectTypeWails, ProjectTypeGo}, types) - }) - - t.Run("detects Node.js project", func(t *testing.T) { - dir := setupTestDir(t, "package.json") - types, err := Discover(dir) - assert.NoError(t, err) - assert.Equal(t, []ProjectType{ProjectTypeNode}, types) - }) - - t.Run("detects PHP project", func(t *testing.T) { - dir := setupTestDir(t, "composer.json") - types, err := Discover(dir) - assert.NoError(t, err) - assert.Equal(t, []ProjectType{ProjectTypePHP}, types) - }) - - t.Run("detects multiple project types", func(t *testing.T) { - dir := setupTestDir(t, "go.mod", "package.json") - types, err := Discover(dir) - assert.NoError(t, err) - assert.Equal(t, []ProjectType{ProjectTypeGo, ProjectTypeNode}, types) - }) - - t.Run("empty 
directory returns empty slice", func(t *testing.T) { - dir := t.TempDir() - types, err := Discover(dir) - assert.NoError(t, err) - assert.Empty(t, types) - }) -} - -func TestDiscover_Bad(t *testing.T) { - t.Run("non-existent directory returns empty slice", func(t *testing.T) { - types, err := Discover("/non/existent/path") - assert.NoError(t, err) // os.Stat fails silently in fileExists - assert.Empty(t, types) - }) - - t.Run("directory marker is ignored", func(t *testing.T) { - dir := t.TempDir() - // Create go.mod as a directory instead of a file - err := os.Mkdir(filepath.Join(dir, "go.mod"), 0755) - require.NoError(t, err) - - types, err := Discover(dir) - assert.NoError(t, err) - assert.Empty(t, types) - }) -} - -func TestPrimaryType_Good(t *testing.T) { - t.Run("returns wails for wails project", func(t *testing.T) { - dir := setupTestDir(t, "wails.json", "go.mod") - primary, err := PrimaryType(dir) - assert.NoError(t, err) - assert.Equal(t, ProjectTypeWails, primary) - }) - - t.Run("returns go for go-only project", func(t *testing.T) { - dir := setupTestDir(t, "go.mod") - primary, err := PrimaryType(dir) - assert.NoError(t, err) - assert.Equal(t, ProjectTypeGo, primary) - }) - - t.Run("returns empty string for empty directory", func(t *testing.T) { - dir := t.TempDir() - primary, err := PrimaryType(dir) - assert.NoError(t, err) - assert.Empty(t, primary) - }) -} - -func TestIsGoProject_Good(t *testing.T) { - t.Run("true with go.mod", func(t *testing.T) { - dir := setupTestDir(t, "go.mod") - assert.True(t, IsGoProject(dir)) - }) - - t.Run("true with wails.json", func(t *testing.T) { - dir := setupTestDir(t, "wails.json") - assert.True(t, IsGoProject(dir)) - }) - - t.Run("false without markers", func(t *testing.T) { - dir := t.TempDir() - assert.False(t, IsGoProject(dir)) - }) -} - -func TestIsWailsProject_Good(t *testing.T) { - t.Run("true with wails.json", func(t *testing.T) { - dir := setupTestDir(t, "wails.json") - assert.True(t, IsWailsProject(dir)) - }) - 
- t.Run("false with only go.mod", func(t *testing.T) { - dir := setupTestDir(t, "go.mod") - assert.False(t, IsWailsProject(dir)) - }) -} - -func TestIsNodeProject_Good(t *testing.T) { - t.Run("true with package.json", func(t *testing.T) { - dir := setupTestDir(t, "package.json") - assert.True(t, IsNodeProject(dir)) - }) - - t.Run("false without package.json", func(t *testing.T) { - dir := t.TempDir() - assert.False(t, IsNodeProject(dir)) - }) -} - -func TestIsPHPProject_Good(t *testing.T) { - t.Run("true with composer.json", func(t *testing.T) { - dir := setupTestDir(t, "composer.json") - assert.True(t, IsPHPProject(dir)) - }) - - t.Run("false without composer.json", func(t *testing.T) { - dir := t.TempDir() - assert.False(t, IsPHPProject(dir)) - }) -} - -func TestTarget_Good(t *testing.T) { - target := Target{OS: "linux", Arch: "amd64"} - assert.Equal(t, "linux/amd64", target.String()) -} - -func TestFileExists_Good(t *testing.T) { - t.Run("returns true for existing file", func(t *testing.T) { - dir := t.TempDir() - path := filepath.Join(dir, "test.txt") - err := os.WriteFile(path, []byte("content"), 0644) - require.NoError(t, err) - assert.True(t, fileExists(path)) - }) - - t.Run("returns false for directory", func(t *testing.T) { - dir := t.TempDir() - assert.False(t, fileExists(dir)) - }) - - t.Run("returns false for non-existent path", func(t *testing.T) { - assert.False(t, fileExists("/non/existent/file")) - }) -} - -// TestDiscover_Testdata tests discovery using the testdata fixtures. -// These serve as integration tests with realistic project structures. 
-func TestDiscover_Testdata(t *testing.T) { - testdataDir := "testdata" - - tests := []struct { - name string - dir string - expected []ProjectType - }{ - {"go-project", "go-project", []ProjectType{ProjectTypeGo}}, - {"wails-project", "wails-project", []ProjectType{ProjectTypeWails, ProjectTypeGo}}, - {"node-project", "node-project", []ProjectType{ProjectTypeNode}}, - {"php-project", "php-project", []ProjectType{ProjectTypePHP}}, - {"multi-project", "multi-project", []ProjectType{ProjectTypeGo, ProjectTypeNode}}, - {"empty-project", "empty-project", []ProjectType{}}, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - dir := filepath.Join(testdataDir, tt.dir) - types, err := Discover(dir) - assert.NoError(t, err) - if len(tt.expected) == 0 { - assert.Empty(t, types) - } else { - assert.Equal(t, tt.expected, types) - } - }) - } -} diff --git a/pkg/build/signing/codesign.go b/pkg/build/signing/codesign.go deleted file mode 100644 index 4b55bb5..0000000 --- a/pkg/build/signing/codesign.go +++ /dev/null @@ -1,102 +0,0 @@ -package signing - -import ( - "context" - "fmt" - "os" - "os/exec" - "runtime" -) - -// MacOSSigner signs binaries using macOS codesign. -type MacOSSigner struct { - config MacOSConfig -} - -// Compile-time interface check. -var _ Signer = (*MacOSSigner)(nil) - -// NewMacOSSigner creates a new macOS signer. -func NewMacOSSigner(cfg MacOSConfig) *MacOSSigner { - return &MacOSSigner{config: cfg} -} - -// Name returns "codesign". -func (s *MacOSSigner) Name() string { - return "codesign" -} - -// Available checks if running on macOS with codesign and identity configured. -func (s *MacOSSigner) Available() bool { - if runtime.GOOS != "darwin" { - return false - } - if s.config.Identity == "" { - return false - } - _, err := exec.LookPath("codesign") - return err == nil -} - -// Sign codesigns a binary with hardened runtime. 
-func (s *MacOSSigner) Sign(ctx context.Context, binary string) error { - if !s.Available() { - return fmt.Errorf("codesign.Sign: codesign not available") - } - - cmd := exec.CommandContext(ctx, "codesign", - "--sign", s.config.Identity, - "--timestamp", - "--options", "runtime", // Hardened runtime for notarization - "--force", - binary, - ) - - output, err := cmd.CombinedOutput() - if err != nil { - return fmt.Errorf("codesign.Sign: %w\nOutput: %s", err, string(output)) - } - - return nil -} - -// Notarize submits binary to Apple for notarization and staples the ticket. -// This blocks until Apple responds (typically 1-5 minutes). -func (s *MacOSSigner) Notarize(ctx context.Context, binary string) error { - if s.config.AppleID == "" || s.config.TeamID == "" || s.config.AppPassword == "" { - return fmt.Errorf("codesign.Notarize: missing Apple credentials (apple_id, team_id, app_password)") - } - - // Create ZIP for submission - zipPath := binary + ".zip" - zipCmd := exec.CommandContext(ctx, "zip", "-j", zipPath, binary) - if output, err := zipCmd.CombinedOutput(); err != nil { - return fmt.Errorf("codesign.Notarize: failed to create zip: %w\nOutput: %s", err, string(output)) - } - defer os.Remove(zipPath) - - // Submit to Apple and wait - submitCmd := exec.CommandContext(ctx, "xcrun", "notarytool", "submit", - zipPath, - "--apple-id", s.config.AppleID, - "--team-id", s.config.TeamID, - "--password", s.config.AppPassword, - "--wait", - ) - if output, err := submitCmd.CombinedOutput(); err != nil { - return fmt.Errorf("codesign.Notarize: notarization failed: %w\nOutput: %s", err, string(output)) - } - - // Staple the ticket - stapleCmd := exec.CommandContext(ctx, "xcrun", "stapler", "staple", binary) - if output, err := stapleCmd.CombinedOutput(); err != nil { - return fmt.Errorf("codesign.Notarize: failed to staple: %w\nOutput: %s", err, string(output)) - } - - return nil -} - -// ShouldNotarize returns true if notarization is enabled. 
-func (s *MacOSSigner) ShouldNotarize() bool { - return s.config.Notarize -} diff --git a/pkg/build/signing/codesign_test.go b/pkg/build/signing/codesign_test.go deleted file mode 100644 index ecbd40f..0000000 --- a/pkg/build/signing/codesign_test.go +++ /dev/null @@ -1,59 +0,0 @@ -package signing - -import ( - "context" - "runtime" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestMacOSSigner_Good_Name(t *testing.T) { - s := NewMacOSSigner(MacOSConfig{Identity: "Developer ID Application: Test"}) - assert.Equal(t, "codesign", s.Name()) -} - -func TestMacOSSigner_Good_Available(t *testing.T) { - s := NewMacOSSigner(MacOSConfig{Identity: "Developer ID Application: Test"}) - - if runtime.GOOS == "darwin" { - // Just verify it doesn't panic - _ = s.Available() - } else { - assert.False(t, s.Available()) - } -} - -func TestMacOSSigner_Bad_NoIdentity(t *testing.T) { - s := NewMacOSSigner(MacOSConfig{}) - assert.False(t, s.Available()) -} - -func TestMacOSSigner_Sign_Bad(t *testing.T) { - t.Run("fails when not available", func(t *testing.T) { - if runtime.GOOS == "darwin" { - t.Skip("skipping on macOS") - } - s := NewMacOSSigner(MacOSConfig{Identity: "test"}) - err := s.Sign(context.Background(), "test") - assert.Error(t, err) - assert.Contains(t, err.Error(), "not available") - }) -} - -func TestMacOSSigner_Notarize_Bad(t *testing.T) { - t.Run("fails with missing credentials", func(t *testing.T) { - s := NewMacOSSigner(MacOSConfig{}) - err := s.Notarize(context.Background(), "test") - assert.Error(t, err) - assert.Contains(t, err.Error(), "missing Apple credentials") - }) -} - -func TestMacOSSigner_ShouldNotarize(t *testing.T) { - s := NewMacOSSigner(MacOSConfig{Notarize: true}) - assert.True(t, s.ShouldNotarize()) - - s2 := NewMacOSSigner(MacOSConfig{Notarize: false}) - assert.False(t, s2.ShouldNotarize()) -} \ No newline at end of file diff --git a/pkg/build/signing/gpg.go b/pkg/build/signing/gpg.go deleted file mode 100644 index 80f48fb..0000000 --- 
a/pkg/build/signing/gpg.go +++ /dev/null @@ -1,57 +0,0 @@ -package signing - -import ( - "context" - "fmt" - "os/exec" -) - -// GPGSigner signs files using GPG. -type GPGSigner struct { - KeyID string -} - -// Compile-time interface check. -var _ Signer = (*GPGSigner)(nil) - -// NewGPGSigner creates a new GPG signer. -func NewGPGSigner(keyID string) *GPGSigner { - return &GPGSigner{KeyID: keyID} -} - -// Name returns "gpg". -func (s *GPGSigner) Name() string { - return "gpg" -} - -// Available checks if gpg is installed and key is configured. -func (s *GPGSigner) Available() bool { - if s.KeyID == "" { - return false - } - _, err := exec.LookPath("gpg") - return err == nil -} - -// Sign creates a detached ASCII-armored signature. -// For file.txt, creates file.txt.asc -func (s *GPGSigner) Sign(ctx context.Context, file string) error { - if !s.Available() { - return fmt.Errorf("gpg.Sign: gpg not available or key not configured") - } - - cmd := exec.CommandContext(ctx, "gpg", - "--detach-sign", - "--armor", - "--local-user", s.KeyID, - "--output", file+".asc", - file, - ) - - output, err := cmd.CombinedOutput() - if err != nil { - return fmt.Errorf("gpg.Sign: %w\nOutput: %s", err, string(output)) - } - - return nil -} diff --git a/pkg/build/signing/gpg_test.go b/pkg/build/signing/gpg_test.go deleted file mode 100644 index f53aac0..0000000 --- a/pkg/build/signing/gpg_test.go +++ /dev/null @@ -1,32 +0,0 @@ -package signing - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestGPGSigner_Good_Name(t *testing.T) { - s := NewGPGSigner("ABCD1234") - assert.Equal(t, "gpg", s.Name()) -} - -func TestGPGSigner_Good_Available(t *testing.T) { - s := NewGPGSigner("ABCD1234") - _ = s.Available() -} - -func TestGPGSigner_Bad_NoKey(t *testing.T) { - s := NewGPGSigner("") - assert.False(t, s.Available()) -} - -func TestGPGSigner_Sign_Bad(t *testing.T) { - t.Run("fails when no key", func(t *testing.T) { - s := NewGPGSigner("") - err := 
s.Sign(context.Background(), "test.txt") - assert.Error(t, err) - assert.Contains(t, err.Error(), "not available or key not configured") - }) -} \ No newline at end of file diff --git a/pkg/build/signing/sign.go b/pkg/build/signing/sign.go deleted file mode 100644 index 65e82c9..0000000 --- a/pkg/build/signing/sign.go +++ /dev/null @@ -1,94 +0,0 @@ -package signing - -import ( - "context" - "fmt" - "runtime" -) - -// Artifact represents a build output that can be signed. -// This mirrors build.Artifact to avoid import cycles. -type Artifact struct { - Path string - OS string - Arch string -} - -// SignBinaries signs macOS binaries in the artifacts list. -// Only signs darwin binaries when running on macOS with a configured identity. -func SignBinaries(ctx context.Context, cfg SignConfig, artifacts []Artifact) error { - if !cfg.Enabled { - return nil - } - - // Only sign on macOS - if runtime.GOOS != "darwin" { - return nil - } - - signer := NewMacOSSigner(cfg.MacOS) - if !signer.Available() { - return nil // Silently skip if not configured - } - - for _, artifact := range artifacts { - if artifact.OS != "darwin" { - continue - } - - fmt.Printf(" Signing %s...\n", artifact.Path) - if err := signer.Sign(ctx, artifact.Path); err != nil { - return fmt.Errorf("failed to sign %s: %w", artifact.Path, err) - } - } - - return nil -} - -// NotarizeBinaries notarizes macOS binaries if enabled. 
-func NotarizeBinaries(ctx context.Context, cfg SignConfig, artifacts []Artifact) error { - if !cfg.Enabled || !cfg.MacOS.Notarize { - return nil - } - - if runtime.GOOS != "darwin" { - return nil - } - - signer := NewMacOSSigner(cfg.MacOS) - if !signer.Available() { - return fmt.Errorf("notarization requested but codesign not available") - } - - for _, artifact := range artifacts { - if artifact.OS != "darwin" { - continue - } - - fmt.Printf(" Notarizing %s (this may take a few minutes)...\n", artifact.Path) - if err := signer.Notarize(ctx, artifact.Path); err != nil { - return fmt.Errorf("failed to notarize %s: %w", artifact.Path, err) - } - } - - return nil -} - -// SignChecksums signs the checksums file with GPG. -func SignChecksums(ctx context.Context, cfg SignConfig, checksumFile string) error { - if !cfg.Enabled { - return nil - } - - signer := NewGPGSigner(cfg.GPG.Key) - if !signer.Available() { - return nil // Silently skip if not configured - } - - fmt.Printf(" Signing %s with GPG...\n", checksumFile) - if err := signer.Sign(ctx, checksumFile); err != nil { - return fmt.Errorf("failed to sign checksums: %w", err) - } - - return nil -} diff --git a/pkg/build/signing/signer.go b/pkg/build/signing/signer.go deleted file mode 100644 index 80213a9..0000000 --- a/pkg/build/signing/signer.go +++ /dev/null @@ -1,81 +0,0 @@ -// Package signing provides code signing for build artifacts. -package signing - -import ( - "context" - "os" - "strings" -) - -// Signer defines the interface for code signing implementations. -type Signer interface { - // Name returns the signer's identifier. - Name() string - // Available checks if this signer can be used. - Available() bool - // Sign signs the artifact at the given path. - Sign(ctx context.Context, path string) error -} - -// SignConfig holds signing configuration from .core/build.yaml. 
-type SignConfig struct { - Enabled bool `yaml:"enabled"` - GPG GPGConfig `yaml:"gpg,omitempty"` - MacOS MacOSConfig `yaml:"macos,omitempty"` - Windows WindowsConfig `yaml:"windows,omitempty"` -} - -// GPGConfig holds GPG signing configuration. -type GPGConfig struct { - Key string `yaml:"key"` // Key ID or fingerprint, supports $ENV -} - -// MacOSConfig holds macOS codesign configuration. -type MacOSConfig struct { - Identity string `yaml:"identity"` // Developer ID Application: ... - Notarize bool `yaml:"notarize"` // Submit to Apple for notarization - AppleID string `yaml:"apple_id"` // Apple account email - TeamID string `yaml:"team_id"` // Team ID - AppPassword string `yaml:"app_password"` // App-specific password -} - -// WindowsConfig holds Windows signtool configuration (placeholder). -type WindowsConfig struct { - Certificate string `yaml:"certificate"` // Path to .pfx - Password string `yaml:"password"` // Certificate password -} - -// DefaultSignConfig returns sensible defaults. -func DefaultSignConfig() SignConfig { - return SignConfig{ - Enabled: true, - GPG: GPGConfig{ - Key: os.Getenv("GPG_KEY_ID"), - }, - MacOS: MacOSConfig{ - Identity: os.Getenv("CODESIGN_IDENTITY"), - AppleID: os.Getenv("APPLE_ID"), - TeamID: os.Getenv("APPLE_TEAM_ID"), - AppPassword: os.Getenv("APPLE_APP_PASSWORD"), - }, - } -} - -// ExpandEnv expands environment variables in config values. -func (c *SignConfig) ExpandEnv() { - c.GPG.Key = expandEnv(c.GPG.Key) - c.MacOS.Identity = expandEnv(c.MacOS.Identity) - c.MacOS.AppleID = expandEnv(c.MacOS.AppleID) - c.MacOS.TeamID = expandEnv(c.MacOS.TeamID) - c.MacOS.AppPassword = expandEnv(c.MacOS.AppPassword) - c.Windows.Certificate = expandEnv(c.Windows.Certificate) - c.Windows.Password = expandEnv(c.Windows.Password) -} - -// expandEnv expands $VAR or ${VAR} in a string. 
-func expandEnv(s string) string { - if strings.HasPrefix(s, "$") { - return os.ExpandEnv(s) - } - return s -} diff --git a/pkg/build/signing/signing_test.go b/pkg/build/signing/signing_test.go deleted file mode 100644 index 90a09ee..0000000 --- a/pkg/build/signing/signing_test.go +++ /dev/null @@ -1,153 +0,0 @@ -package signing - -import ( - "context" - "runtime" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestSignBinaries_Good_SkipsNonDarwin(t *testing.T) { - ctx := context.Background() - cfg := SignConfig{ - Enabled: true, - MacOS: MacOSConfig{ - Identity: "Developer ID Application: Test", - }, - } - - // Create fake artifact for linux - artifacts := []Artifact{ - {Path: "/tmp/test-binary", OS: "linux", Arch: "amd64"}, - } - - // Should not error even though binary doesn't exist (skips non-darwin) - err := SignBinaries(ctx, cfg, artifacts) - if err != nil { - t.Errorf("unexpected error: %v", err) - } -} - -func TestSignBinaries_Good_DisabledConfig(t *testing.T) { - ctx := context.Background() - cfg := SignConfig{ - Enabled: false, - } - - artifacts := []Artifact{ - {Path: "/tmp/test-binary", OS: "darwin", Arch: "arm64"}, - } - - err := SignBinaries(ctx, cfg, artifacts) - if err != nil { - t.Errorf("unexpected error: %v", err) - } -} - -func TestSignBinaries_Good_SkipsOnNonMacOS(t *testing.T) { - if runtime.GOOS == "darwin" { - t.Skip("Skipping on macOS - this tests non-macOS behavior") - } - - ctx := context.Background() - cfg := SignConfig{ - Enabled: true, - MacOS: MacOSConfig{ - Identity: "Developer ID Application: Test", - }, - } - - artifacts := []Artifact{ - {Path: "/tmp/test-binary", OS: "darwin", Arch: "arm64"}, - } - - err := SignBinaries(ctx, cfg, artifacts) - if err != nil { - t.Errorf("unexpected error: %v", err) - } -} - -func TestNotarizeBinaries_Good_DisabledConfig(t *testing.T) { - ctx := context.Background() - cfg := SignConfig{ - Enabled: false, - } - - artifacts := []Artifact{ - {Path: "/tmp/test-binary", OS: "darwin", 
Arch: "arm64"}, - } - - err := NotarizeBinaries(ctx, cfg, artifacts) - if err != nil { - t.Errorf("unexpected error: %v", err) - } -} - -func TestNotarizeBinaries_Good_NotarizeDisabled(t *testing.T) { - ctx := context.Background() - cfg := SignConfig{ - Enabled: true, - MacOS: MacOSConfig{ - Notarize: false, - }, - } - - artifacts := []Artifact{ - {Path: "/tmp/test-binary", OS: "darwin", Arch: "arm64"}, - } - - err := NotarizeBinaries(ctx, cfg, artifacts) - if err != nil { - t.Errorf("unexpected error: %v", err) - } -} - -func TestSignChecksums_Good_SkipsNoKey(t *testing.T) { - ctx := context.Background() - cfg := SignConfig{ - Enabled: true, - GPG: GPGConfig{ - Key: "", // No key configured - }, - } - - // Should silently skip when no key - err := SignChecksums(ctx, cfg, "/tmp/CHECKSUMS.txt") - if err != nil { - t.Errorf("unexpected error: %v", err) - } -} - -func TestSignChecksums_Good_Disabled(t *testing.T) { - ctx := context.Background() - cfg := SignConfig{ - Enabled: false, - } - - err := SignChecksums(ctx, cfg, "/tmp/CHECKSUMS.txt") - if err != nil { - t.Errorf("unexpected error: %v", err) - } -} - -func TestDefaultSignConfig(t *testing.T) { - cfg := DefaultSignConfig() - assert.True(t, cfg.Enabled) -} - -func TestSignConfig_ExpandEnv(t *testing.T) { - t.Setenv("TEST_KEY", "ABC") - cfg := SignConfig{ - GPG: GPGConfig{Key: "$TEST_KEY"}, - } - cfg.ExpandEnv() - assert.Equal(t, "ABC", cfg.GPG.Key) -} - -func TestWindowsSigner_Good(t *testing.T) { - s := NewWindowsSigner(WindowsConfig{}) - assert.Equal(t, "signtool", s.Name()) - assert.False(t, s.Available()) - assert.NoError(t, s.Sign(context.Background(), "test.exe")) -} diff --git a/pkg/build/signing/signtool.go b/pkg/build/signing/signtool.go deleted file mode 100644 index 9d426b6..0000000 --- a/pkg/build/signing/signtool.go +++ /dev/null @@ -1,34 +0,0 @@ -package signing - -import ( - "context" -) - -// WindowsSigner signs binaries using Windows signtool (placeholder). 
-type WindowsSigner struct { - config WindowsConfig -} - -// Compile-time interface check. -var _ Signer = (*WindowsSigner)(nil) - -// NewWindowsSigner creates a new Windows signer. -func NewWindowsSigner(cfg WindowsConfig) *WindowsSigner { - return &WindowsSigner{config: cfg} -} - -// Name returns "signtool". -func (s *WindowsSigner) Name() string { - return "signtool" -} - -// Available returns false (not yet implemented). -func (s *WindowsSigner) Available() bool { - return false -} - -// Sign is a placeholder that does nothing. -func (s *WindowsSigner) Sign(ctx context.Context, binary string) error { - // TODO: Implement Windows signing - return nil -} diff --git a/pkg/build/testdata/config-project/.core/build.yaml b/pkg/build/testdata/config-project/.core/build.yaml deleted file mode 100644 index ff3a997..0000000 --- a/pkg/build/testdata/config-project/.core/build.yaml +++ /dev/null @@ -1,25 +0,0 @@ -# Example build configuration for Core build system -version: 1 - -project: - name: example-cli - description: An example CLI application - main: ./cmd/example - binary: example - -build: - cgo: false - flags: - - -trimpath - ldflags: - - -s - - -w - env: [] - -targets: - - os: linux - arch: amd64 - - os: darwin - arch: arm64 - - os: windows - arch: amd64 diff --git a/pkg/build/testdata/empty-project/.gitkeep b/pkg/build/testdata/empty-project/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/pkg/build/testdata/go-project/go.mod b/pkg/build/testdata/go-project/go.mod deleted file mode 100644 index deedf38..0000000 --- a/pkg/build/testdata/go-project/go.mod +++ /dev/null @@ -1,3 +0,0 @@ -module example.com/go-project - -go 1.21 diff --git a/pkg/build/testdata/multi-project/go.mod b/pkg/build/testdata/multi-project/go.mod deleted file mode 100644 index f45e24d..0000000 --- a/pkg/build/testdata/multi-project/go.mod +++ /dev/null @@ -1,3 +0,0 @@ -module example.com/multi-project - -go 1.21 diff --git a/pkg/build/testdata/multi-project/package.json 
b/pkg/build/testdata/multi-project/package.json deleted file mode 100644 index 18c5954..0000000 --- a/pkg/build/testdata/multi-project/package.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name": "multi-project", - "version": "1.0.0" -} diff --git a/pkg/build/testdata/node-project/package.json b/pkg/build/testdata/node-project/package.json deleted file mode 100644 index 6d873ce..0000000 --- a/pkg/build/testdata/node-project/package.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name": "node-project", - "version": "1.0.0" -} diff --git a/pkg/build/testdata/php-project/composer.json b/pkg/build/testdata/php-project/composer.json deleted file mode 100644 index 962108e..0000000 --- a/pkg/build/testdata/php-project/composer.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name": "vendor/php-project", - "type": "library" -} diff --git a/pkg/build/testdata/wails-project/go.mod b/pkg/build/testdata/wails-project/go.mod deleted file mode 100644 index e4daed1..0000000 --- a/pkg/build/testdata/wails-project/go.mod +++ /dev/null @@ -1,3 +0,0 @@ -module example.com/wails-project - -go 1.21 diff --git a/pkg/build/testdata/wails-project/wails.json b/pkg/build/testdata/wails-project/wails.json deleted file mode 100644 index aaa778f..0000000 --- a/pkg/build/testdata/wails-project/wails.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name": "wails-project", - "outputfilename": "wails-project" -} diff --git a/pkg/cache/cache.go b/pkg/cache/cache.go deleted file mode 100644 index 6081fc3..0000000 --- a/pkg/cache/cache.go +++ /dev/null @@ -1,161 +0,0 @@ -// Package cache provides a file-based cache for GitHub API responses. -package cache - -import ( - "encoding/json" - "os" - "path/filepath" - "time" -) - -// DefaultTTL is the default cache expiry time. -const DefaultTTL = 1 * time.Hour - -// Cache represents a file-based cache. -type Cache struct { - baseDir string - ttl time.Duration -} - -// Entry represents a cached item with metadata. 
-type Entry struct { - Data json.RawMessage `json:"data"` - CachedAt time.Time `json:"cached_at"` - ExpiresAt time.Time `json:"expires_at"` -} - -// New creates a new cache instance. -// If baseDir is empty, uses .core/cache in current directory -func New(baseDir string, ttl time.Duration) (*Cache, error) { - if baseDir == "" { - // Use .core/cache in current working directory - cwd, err := os.Getwd() - if err != nil { - return nil, err - } - baseDir = filepath.Join(cwd, ".core", "cache") - } - - if ttl == 0 { - ttl = DefaultTTL - } - - // Ensure cache directory exists - if err := os.MkdirAll(baseDir, 0755); err != nil { - return nil, err - } - - return &Cache{ - baseDir: baseDir, - ttl: ttl, - }, nil -} - -// Path returns the full path for a cache key. -func (c *Cache) Path(key string) string { - return filepath.Join(c.baseDir, key+".json") -} - -// Get retrieves a cached item if it exists and hasn't expired. -func (c *Cache) Get(key string, dest interface{}) (bool, error) { - path := c.Path(key) - - data, err := os.ReadFile(path) - if err != nil { - if os.IsNotExist(err) { - return false, nil - } - return false, err - } - - var entry Entry - if err := json.Unmarshal(data, &entry); err != nil { - // Invalid cache file, treat as miss - return false, nil - } - - // Check expiry - if time.Now().After(entry.ExpiresAt) { - return false, nil - } - - // Unmarshal the actual data - if err := json.Unmarshal(entry.Data, dest); err != nil { - return false, err - } - - return true, nil -} - -// Set stores an item in the cache. 
-func (c *Cache) Set(key string, data interface{}) error { - path := c.Path(key) - - // Ensure parent directory exists - if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil { - return err - } - - // Marshal the data - dataBytes, err := json.Marshal(data) - if err != nil { - return err - } - - entry := Entry{ - Data: dataBytes, - CachedAt: time.Now(), - ExpiresAt: time.Now().Add(c.ttl), - } - - entryBytes, err := json.MarshalIndent(entry, "", " ") - if err != nil { - return err - } - - return os.WriteFile(path, entryBytes, 0644) -} - -// Delete removes an item from the cache. -func (c *Cache) Delete(key string) error { - path := c.Path(key) - err := os.Remove(path) - if os.IsNotExist(err) { - return nil - } - return err -} - -// Clear removes all cached items. -func (c *Cache) Clear() error { - return os.RemoveAll(c.baseDir) -} - -// Age returns how old a cached item is, or -1 if not cached. -func (c *Cache) Age(key string) time.Duration { - path := c.Path(key) - - data, err := os.ReadFile(path) - if err != nil { - return -1 - } - - var entry Entry - if err := json.Unmarshal(data, &entry); err != nil { - return -1 - } - - return time.Since(entry.CachedAt) -} - -// GitHub-specific cache keys - -// GitHubReposKey returns the cache key for an org's repo list. -func GitHubReposKey(org string) string { - return filepath.Join("github", org, "repos") -} - -// GitHubRepoKey returns the cache key for a specific repo's metadata. 
-func GitHubRepoKey(org, repo string) string { - return filepath.Join("github", org, repo, "meta") -} diff --git a/pkg/ci/cmd_changelog.go b/pkg/ci/cmd_changelog.go deleted file mode 100644 index 6904cb8..0000000 --- a/pkg/ci/cmd_changelog.go +++ /dev/null @@ -1,57 +0,0 @@ -package ci - -import ( - "os" - "os/exec" - "strings" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/release" -) - -func runChangelog(fromRef, toRef string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - // Auto-detect refs if not provided - if fromRef == "" || toRef == "" { - tag, err := latestTag(cwd) - if err == nil { - if fromRef == "" { - fromRef = tag - } - if toRef == "" { - toRef = "HEAD" - } - } else { - // No tags, use initial commit? Or just HEAD? - cli.Text(i18n.T("cmd.ci.changelog.no_tags")) - return nil - } - } - - cli.Print("%s %s..%s\n\n", releaseDimStyle.Render(i18n.T("cmd.ci.changelog.generating")), fromRef, toRef) - - // Generate changelog - changelog, err := release.Generate(cwd, fromRef, toRef) - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.generate", "changelog"), err) - } - - cli.Text(changelog) - - return nil -} - -func latestTag(dir string) (string, error) { - cmd := exec.Command("git", "describe", "--tags", "--abbrev=0") - cmd.Dir = dir - out, err := cmd.Output() - if err != nil { - return "", err - } - return strings.TrimSpace(string(out)), nil -} \ No newline at end of file diff --git a/pkg/ci/cmd_ci.go b/pkg/ci/cmd_ci.go deleted file mode 100644 index 097ceed..0000000 --- a/pkg/ci/cmd_ci.go +++ /dev/null @@ -1,84 +0,0 @@ -// Package ci provides release publishing commands. 
-package ci - -import ( - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -// Style aliases from shared -var ( - releaseHeaderStyle = cli.RepoStyle - releaseSuccessStyle = cli.SuccessStyle - releaseErrorStyle = cli.ErrorStyle - releaseDimStyle = cli.DimStyle - releaseValueStyle = cli.ValueStyle -) - -// Flag variables for ci command -var ( - ciGoForLaunch bool - ciVersion string - ciDraft bool - ciPrerelease bool -) - -// Flag variables for changelog subcommand -var ( - changelogFromRef string - changelogToRef string -) - -var ciCmd = &cli.Command{ - Use: "ci", - Short: i18n.T("cmd.ci.short"), - Long: i18n.T("cmd.ci.long"), - RunE: func(cmd *cli.Command, args []string) error { - dryRun := !ciGoForLaunch - return runCIPublish(dryRun, ciVersion, ciDraft, ciPrerelease) - }, -} - -var ciInitCmd = &cli.Command{ - Use: "init", - Short: i18n.T("cmd.ci.init.short"), - Long: i18n.T("cmd.ci.init.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runCIReleaseInit() - }, -} - -var ciChangelogCmd = &cli.Command{ - Use: "changelog", - Short: i18n.T("cmd.ci.changelog.short"), - Long: i18n.T("cmd.ci.changelog.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runChangelog(changelogFromRef, changelogToRef) - }, -} - -var ciVersionCmd = &cli.Command{ - Use: "version", - Short: i18n.T("cmd.ci.version.short"), - Long: i18n.T("cmd.ci.version.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runCIReleaseVersion() - }, -} - -func init() { - // Main ci command flags - ciCmd.Flags().BoolVar(&ciGoForLaunch, "we-are-go-for-launch", false, i18n.T("cmd.ci.flag.go_for_launch")) - ciCmd.Flags().StringVar(&ciVersion, "version", "", i18n.T("cmd.ci.flag.version")) - ciCmd.Flags().BoolVar(&ciDraft, "draft", false, i18n.T("cmd.ci.flag.draft")) - ciCmd.Flags().BoolVar(&ciPrerelease, "prerelease", false, i18n.T("cmd.ci.flag.prerelease")) - - // Changelog subcommand flags - 
ciChangelogCmd.Flags().StringVar(&changelogFromRef, "from", "", i18n.T("cmd.ci.changelog.flag.from")) - ciChangelogCmd.Flags().StringVar(&changelogToRef, "to", "", i18n.T("cmd.ci.changelog.flag.to")) - - // Add subcommands - ciCmd.AddCommand(ciInitCmd) - ciCmd.AddCommand(ciChangelogCmd) - ciCmd.AddCommand(ciVersionCmd) -} diff --git a/pkg/ci/cmd_commands.go b/pkg/ci/cmd_commands.go deleted file mode 100644 index bf279c4..0000000 --- a/pkg/ci/cmd_commands.go +++ /dev/null @@ -1,23 +0,0 @@ -// Package ci provides release publishing commands for CI/CD pipelines. -// -// Publishes pre-built artifacts from dist/ to configured targets: -// - GitHub Releases -// - S3-compatible storage -// - Custom endpoints -// -// Safe by default: runs in dry-run mode unless --we-are-go-for-launch is specified. -// Configuration via .core/release.yaml. -package ci - -import ( - "github.com/host-uk/core/pkg/cli" -) - -func init() { - cli.RegisterCommands(AddCICommands) -} - -// AddCICommands registers the 'ci' command and all subcommands. 
-func AddCICommands(root *cli.Command) { - root.AddCommand(ciCmd) -} diff --git a/pkg/ci/cmd_init.go b/pkg/ci/cmd_init.go deleted file mode 100644 index cb3b50d..0000000 --- a/pkg/ci/cmd_init.go +++ /dev/null @@ -1,43 +0,0 @@ -package ci - -import ( - "os" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/release" -) - -func runCIReleaseInit() error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - cli.Print("%s %s\n\n", releaseDimStyle.Render(i18n.Label("init")), i18n.T("cmd.ci.init.initializing")) - - // Check if already initialized - if release.ConfigExists(cwd) { - cli.Text(i18n.T("cmd.ci.init.already_initialized")) - return nil - } - - // Create release config - cfg := release.DefaultConfig() - if err := release.WriteConfig(cfg, cwd); err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.create", "config"), err) - } - - cli.Blank() - cli.Print("%s %s\n", releaseSuccessStyle.Render("v"), i18n.T("cmd.ci.init.created_config")) - - // Templates init removed as functionality not exposed - - cli.Blank() - - cli.Text(i18n.T("cmd.ci.init.next_steps")) - cli.Print(" %s\n", i18n.T("cmd.ci.init.edit_config")) - cli.Print(" %s\n", i18n.T("cmd.ci.init.run_ci")) - - return nil -} \ No newline at end of file diff --git a/pkg/ci/cmd_publish.go b/pkg/ci/cmd_publish.go deleted file mode 100644 index 23b0c4e..0000000 --- a/pkg/ci/cmd_publish.go +++ /dev/null @@ -1,81 +0,0 @@ -package ci - -import ( - "context" - "errors" - "os" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/release" -) - -// runCIPublish publishes pre-built artifacts from dist/. -// It does NOT build - use `core build` first. 
-func runCIPublish(dryRun bool, version string, draft, prerelease bool) error { - ctx := context.Background() - - // Get current directory - projectDir, err := os.Getwd() - if err != nil { - return cli.WrapVerb(err, "get", "working directory") - } - - // Load configuration - cfg, err := release.LoadConfig(projectDir) - if err != nil { - return cli.WrapVerb(err, "load", "config") - } - - // Apply CLI overrides - if version != "" { - cfg.SetVersion(version) - } - - // Apply draft/prerelease overrides to all publishers - if draft || prerelease { - for i := range cfg.Publishers { - if draft { - cfg.Publishers[i].Draft = true - } - if prerelease { - cfg.Publishers[i].Prerelease = true - } - } - } - - // Print header - cli.Print("%s %s\n", releaseHeaderStyle.Render(i18n.T("cmd.ci.label.ci")), i18n.T("cmd.ci.publishing")) - if dryRun { - cli.Print(" %s\n", releaseDimStyle.Render(i18n.T("cmd.ci.dry_run_hint"))) - } else { - cli.Print(" %s\n", releaseSuccessStyle.Render(i18n.T("cmd.ci.go_for_launch"))) - } - cli.Blank() - - // Check for publishers - if len(cfg.Publishers) == 0 { - return errors.New(i18n.T("cmd.ci.error.no_publishers")) - } - - // Publish pre-built artifacts - rel, err := release.Publish(ctx, cfg, dryRun) - if err != nil { - cli.Print("%s %v\n", releaseErrorStyle.Render(i18n.Label("error")), err) - return err - } - - // Print summary - cli.Blank() - cli.Print("%s %s\n", releaseSuccessStyle.Render(i18n.T("i18n.done.pass")), i18n.T("cmd.ci.publish_completed")) - cli.Print(" %s %s\n", i18n.Label("version"), releaseValueStyle.Render(rel.Version)) - cli.Print(" %s %d\n", i18n.T("cmd.ci.label.artifacts"), len(rel.Artifacts)) - - if !dryRun { - for _, pub := range cfg.Publishers { - cli.Print(" %s %s\n", i18n.T("cmd.ci.label.published"), releaseValueStyle.Render(pub.Type)) - } - } - - return nil -} diff --git a/pkg/ci/cmd_version.go b/pkg/ci/cmd_version.go deleted file mode 100644 index f38127c..0000000 --- a/pkg/ci/cmd_version.go +++ /dev/null @@ -1,25 +0,0 @@ 
-package ci - -import ( - "os" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/release" -) - -// runCIReleaseVersion shows the determined version. -func runCIReleaseVersion() error { - projectDir, err := os.Getwd() - if err != nil { - return cli.WrapVerb(err, "get", "working directory") - } - - version, err := release.DetermineVersion(projectDir) - if err != nil { - return cli.WrapVerb(err, "determine", "version") - } - - cli.Print("%s %s\n", i18n.Label("version"), releaseValueStyle.Render(version)) - return nil -} diff --git a/pkg/cli/ansi.go b/pkg/cli/ansi.go deleted file mode 100644 index 2e58b3e..0000000 --- a/pkg/cli/ansi.go +++ /dev/null @@ -1,125 +0,0 @@ -package cli - -import ( - "fmt" - "strconv" - "strings" -) - -// ANSI escape codes -const ( - ansiReset = "\033[0m" - ansiBold = "\033[1m" - ansiDim = "\033[2m" - ansiItalic = "\033[3m" - ansiUnderline = "\033[4m" -) - -// AnsiStyle represents terminal text styling. -// Use NewStyle() to create, chain methods, call Render(). -type AnsiStyle struct { - bold bool - dim bool - italic bool - underline bool - fg string - bg string -} - -// NewStyle creates a new empty style. -func NewStyle() *AnsiStyle { - return &AnsiStyle{} -} - -// Bold enables bold text. -func (s *AnsiStyle) Bold() *AnsiStyle { - s.bold = true - return s -} - -// Dim enables dim text. -func (s *AnsiStyle) Dim() *AnsiStyle { - s.dim = true - return s -} - -// Italic enables italic text. -func (s *AnsiStyle) Italic() *AnsiStyle { - s.italic = true - return s -} - -// Underline enables underlined text. -func (s *AnsiStyle) Underline() *AnsiStyle { - s.underline = true - return s -} - -// Foreground sets foreground color from hex string. -func (s *AnsiStyle) Foreground(hex string) *AnsiStyle { - s.fg = fgColorHex(hex) - return s -} - -// Background sets background color from hex string. 
-func (s *AnsiStyle) Background(hex string) *AnsiStyle { - s.bg = bgColorHex(hex) - return s -} - -// Render applies the style to text. -func (s *AnsiStyle) Render(text string) string { - if s == nil { - return text - } - - var codes []string - if s.bold { - codes = append(codes, ansiBold) - } - if s.dim { - codes = append(codes, ansiDim) - } - if s.italic { - codes = append(codes, ansiItalic) - } - if s.underline { - codes = append(codes, ansiUnderline) - } - if s.fg != "" { - codes = append(codes, s.fg) - } - if s.bg != "" { - codes = append(codes, s.bg) - } - - if len(codes) == 0 { - return text - } - - return strings.Join(codes, "") + text + ansiReset -} - -// fgColorHex converts a hex string to an ANSI foreground color code. -func fgColorHex(hex string) string { - r, g, b := hexToRGB(hex) - return fmt.Sprintf("\033[38;2;%d;%d;%dm", r, g, b) -} - -// bgColorHex converts a hex string to an ANSI background color code. -func bgColorHex(hex string) string { - r, g, b := hexToRGB(hex) - return fmt.Sprintf("\033[48;2;%d;%d;%dm", r, g, b) -} - -// hexToRGB converts a hex string to RGB values. 
-func hexToRGB(hex string) (int, int, int) { - hex = strings.TrimPrefix(hex, "#") - if len(hex) != 6 { - return 255, 255, 255 - } - r, _ := strconv.ParseInt(hex[0:2], 16, 64) - g, _ := strconv.ParseInt(hex[2:4], 16, 64) - b, _ := strconv.ParseInt(hex[4:6], 16, 64) - return int(r), int(g), int(b) -} \ No newline at end of file diff --git a/pkg/cli/ansi_test.go b/pkg/cli/ansi_test.go deleted file mode 100644 index 75ace2c..0000000 --- a/pkg/cli/ansi_test.go +++ /dev/null @@ -1,20 +0,0 @@ -package cli - -import ( - "strings" - "testing" -) - -func TestAnsiStyle_Render(t *testing.T) { - s := NewStyle().Bold().Foreground("#ff0000") - got := s.Render("test") - if got == "test" { - t.Error("Expected styled output") - } - if !strings.Contains(got, "test") { - t.Error("Output should contain text") - } - if !strings.Contains(got, "[1m") { - t.Error("Output should contain bold code") - } -} diff --git a/pkg/cli/app.go b/pkg/cli/app.go deleted file mode 100644 index 0215a88..0000000 --- a/pkg/cli/app.go +++ /dev/null @@ -1,101 +0,0 @@ -package cli - -import ( - "os" - - "github.com/host-uk/core/pkg/framework" - "github.com/host-uk/core/pkg/log" - "github.com/spf13/cobra" -) - -const ( - // AppName is the CLI application name. - AppName = "core" -) - -// AppVersion is set at build time via ldflags: -// -// go build -ldflags="-X github.com/host-uk/core/pkg/cli.AppVersion=v1.0.0" -var AppVersion = "dev" - -// Main initialises and runs the CLI application. -// This is the main entry point for the CLI. -// Exits with code 1 on error. 
-func Main() { - // Initialise CLI runtime with services - if err := Init(Options{ - AppName: AppName, - Version: AppVersion, - Services: []framework.Option{ - framework.WithName("i18n", NewI18nService(I18nOptions{})), - framework.WithName("log", NewLogService(log.Options{ - Level: log.LevelInfo, - })), - }, - }); err != nil { - Fatal(err) - } - defer Shutdown() - - // Add completion command to the CLI's root - RootCmd().AddCommand(completionCmd) - - Fatal(Execute()) -} - -// completionCmd generates shell completion scripts. -var completionCmd = &cobra.Command{ - Use: "completion [bash|zsh|fish|powershell]", - Short: "Generate shell completion script", - Long: `Generate shell completion script for the specified shell. - -To load completions: - -Bash: - $ source <(core completion bash) - - # To load completions for each session, execute once: - # Linux: - $ core completion bash > /etc/bash_completion.d/core - # macOS: - $ core completion bash > $(brew --prefix)/etc/bash_completion.d/core - -Zsh: - # If shell completion is not already enabled in your environment, - # you will need to enable it. You can execute the following once: - $ echo "autoload -U compinit; compinit" >> ~/.zshrc - - # To load completions for each session, execute once: - $ core completion zsh > "${fpath[1]}/_core" - - # You will need to start a new shell for this setup to take effect. - -Fish: - $ core completion fish | source - - # To load completions for each session, execute once: - $ core completion fish > ~/.config/fish/completions/core.fish - -PowerShell: - PS> core completion powershell | Out-String | Invoke-Expression - - # To load completions for every new session, run: - PS> core completion powershell > core.ps1 - # and source this file from your PowerShell profile. 
-`, - DisableFlagsInUseLine: true, - ValidArgs: []string{"bash", "zsh", "fish", "powershell"}, - Args: cobra.MatchAll(cobra.ExactArgs(1), cobra.OnlyValidArgs), - Run: func(cmd *cobra.Command, args []string) { - switch args[0] { - case "bash": - _ = cmd.Root().GenBashCompletion(os.Stdout) - case "zsh": - _ = cmd.Root().GenZshCompletion(os.Stdout) - case "fish": - _ = cmd.Root().GenFishCompletion(os.Stdout, true) - case "powershell": - _ = cmd.Root().GenPowerShellCompletionWithDesc(os.Stdout) - } - }, -} diff --git a/pkg/cli/check.go b/pkg/cli/check.go deleted file mode 100644 index a6c9e9e..0000000 --- a/pkg/cli/check.go +++ /dev/null @@ -1,91 +0,0 @@ -package cli - -import "fmt" - -// CheckBuilder provides fluent API for check results. -type CheckBuilder struct { - name string - status string - style *AnsiStyle - icon string - duration string -} - -// Check starts building a check result line. -// -// cli.Check("audit").Pass() -// cli.Check("fmt").Fail().Duration("2.3s") -// cli.Check("test").Skip() -func Check(name string) *CheckBuilder { - return &CheckBuilder{name: name} -} - -// Pass marks the check as passed. -func (c *CheckBuilder) Pass() *CheckBuilder { - c.status = "passed" - c.style = SuccessStyle - c.icon = Glyph(":check:") - return c -} - -// Fail marks the check as failed. -func (c *CheckBuilder) Fail() *CheckBuilder { - c.status = "failed" - c.style = ErrorStyle - c.icon = Glyph(":cross:") - return c -} - -// Skip marks the check as skipped. -func (c *CheckBuilder) Skip() *CheckBuilder { - c.status = "skipped" - c.style = DimStyle - c.icon = "-" - return c -} - -// Warn marks the check as warning. -func (c *CheckBuilder) Warn() *CheckBuilder { - c.status = "warning" - c.style = WarningStyle - c.icon = Glyph(":warn:") - return c -} - -// Duration adds duration to the check result. -func (c *CheckBuilder) Duration(d string) *CheckBuilder { - c.duration = d - return c -} - -// Message adds a custom message instead of status. 
-func (c *CheckBuilder) Message(msg string) *CheckBuilder { - c.status = msg - return c -} - -// String returns the formatted check line. -func (c *CheckBuilder) String() string { - icon := c.icon - if c.style != nil { - icon = c.style.Render(c.icon) - } - - status := c.status - if c.style != nil && c.status != "" { - status = c.style.Render(c.status) - } - - if c.duration != "" { - return fmt.Sprintf(" %s %-20s %-10s %s", icon, c.name, status, DimStyle.Render(c.duration)) - } - if status != "" { - return fmt.Sprintf(" %s %s %s", icon, c.name, status) - } - return fmt.Sprintf(" %s %s", icon, c.name) -} - -// Print outputs the check result. -func (c *CheckBuilder) Print() { - fmt.Println(c.String()) -} \ No newline at end of file diff --git a/pkg/cli/check_test.go b/pkg/cli/check_test.go deleted file mode 100644 index 760853c..0000000 --- a/pkg/cli/check_test.go +++ /dev/null @@ -1,49 +0,0 @@ -package cli - -import "testing" - -func TestCheckBuilder(t *testing.T) { - UseASCII() // Deterministic output - - // Pass - c := Check("foo").Pass() - got := c.String() - if got == "" { - t.Error("Empty output for Pass") - } - - // Fail - c = Check("foo").Fail() - got = c.String() - if got == "" { - t.Error("Empty output for Fail") - } - - // Skip - c = Check("foo").Skip() - got = c.String() - if got == "" { - t.Error("Empty output for Skip") - } - - // Warn - c = Check("foo").Warn() - got = c.String() - if got == "" { - t.Error("Empty output for Warn") - } - - // Duration - c = Check("foo").Pass().Duration("1s") - got = c.String() - if got == "" { - t.Error("Empty output for Duration") - } - - // Message - c = Check("foo").Message("status") - got = c.String() - if got == "" { - t.Error("Empty output for Message") - } -} diff --git a/pkg/cli/command.go b/pkg/cli/command.go deleted file mode 100644 index 31b6e1b..0000000 --- a/pkg/cli/command.go +++ /dev/null @@ -1,193 +0,0 @@ -package cli - -import ( - "github.com/spf13/cobra" -) - -// 
───────────────────────────────────────────────────────────────────────────── -// Command Type Re-export -// ───────────────────────────────────────────────────────────────────────────── - -// Command is the cobra command type. -// Re-exported for convenience so packages don't need to import cobra directly. -type Command = cobra.Command - -// ───────────────────────────────────────────────────────────────────────────── -// Command Builders -// ───────────────────────────────────────────────────────────────────────────── - -// NewCommand creates a new command with a RunE handler. -// This is the standard way to create commands that may return errors. -// -// cmd := cli.NewCommand("build", "Build the project", "", func(cmd *cli.Command, args []string) error { -// // Build logic -// return nil -// }) -func NewCommand(use, short, long string, run func(cmd *Command, args []string) error) *Command { - cmd := &Command{ - Use: use, - Short: short, - RunE: run, - } - if long != "" { - cmd.Long = long - } - return cmd -} - -// NewGroup creates a new command group (no RunE). -// Use this for parent commands that only contain subcommands. -// -// devCmd := cli.NewGroup("dev", "Development commands", "") -// devCmd.AddCommand(buildCmd, testCmd) -func NewGroup(use, short, long string) *Command { - cmd := &Command{ - Use: use, - Short: short, - } - if long != "" { - cmd.Long = long - } - return cmd -} - -// NewRun creates a new command with a simple Run handler (no error return). -// Use when the command cannot fail. 
-// -// cmd := cli.NewRun("version", "Show version", "", func(cmd *cli.Command, args []string) { -// cli.Println("v1.0.0") -// }) -func NewRun(use, short, long string, run func(cmd *Command, args []string)) *Command { - cmd := &Command{ - Use: use, - Short: short, - Run: run, - } - if long != "" { - cmd.Long = long - } - return cmd -} - -// ───────────────────────────────────────────────────────────────────────────── -// Flag Helpers -// ───────────────────────────────────────────────────────────────────────────── - -// StringFlag adds a string flag to a command. -// The value will be stored in the provided pointer. -// -// var output string -// cli.StringFlag(cmd, &output, "output", "o", "", "Output file path") -func StringFlag(cmd *Command, ptr *string, name, short, def, usage string) { - if short != "" { - cmd.Flags().StringVarP(ptr, name, short, def, usage) - } else { - cmd.Flags().StringVar(ptr, name, def, usage) - } -} - -// BoolFlag adds a boolean flag to a command. -// The value will be stored in the provided pointer. -// -// var verbose bool -// cli.BoolFlag(cmd, &verbose, "verbose", "v", false, "Enable verbose output") -func BoolFlag(cmd *Command, ptr *bool, name, short string, def bool, usage string) { - if short != "" { - cmd.Flags().BoolVarP(ptr, name, short, def, usage) - } else { - cmd.Flags().BoolVar(ptr, name, def, usage) - } -} - -// IntFlag adds an integer flag to a command. -// The value will be stored in the provided pointer. -// -// var count int -// cli.IntFlag(cmd, &count, "count", "n", 10, "Number of items") -func IntFlag(cmd *Command, ptr *int, name, short string, def int, usage string) { - if short != "" { - cmd.Flags().IntVarP(ptr, name, short, def, usage) - } else { - cmd.Flags().IntVar(ptr, name, def, usage) - } -} - -// StringSliceFlag adds a string slice flag to a command. -// The value will be stored in the provided pointer. 
-// -// var tags []string -// cli.StringSliceFlag(cmd, &tags, "tag", "t", nil, "Tags to apply") -func StringSliceFlag(cmd *Command, ptr *[]string, name, short string, def []string, usage string) { - if short != "" { - cmd.Flags().StringSliceVarP(ptr, name, short, def, usage) - } else { - cmd.Flags().StringSliceVar(ptr, name, def, usage) - } -} - -// ───────────────────────────────────────────────────────────────────────────── -// Persistent Flag Helpers -// ───────────────────────────────────────────────────────────────────────────── - -// PersistentStringFlag adds a persistent string flag (inherited by subcommands). -func PersistentStringFlag(cmd *Command, ptr *string, name, short, def, usage string) { - if short != "" { - cmd.PersistentFlags().StringVarP(ptr, name, short, def, usage) - } else { - cmd.PersistentFlags().StringVar(ptr, name, def, usage) - } -} - -// PersistentBoolFlag adds a persistent boolean flag (inherited by subcommands). -func PersistentBoolFlag(cmd *Command, ptr *bool, name, short string, def bool, usage string) { - if short != "" { - cmd.PersistentFlags().BoolVarP(ptr, name, short, def, usage) - } else { - cmd.PersistentFlags().BoolVar(ptr, name, def, usage) - } -} - -// ───────────────────────────────────────────────────────────────────────────── -// Command Configuration -// ───────────────────────────────────────────────────────────────────────────── - -// WithArgs sets the Args validation function for a command. -// Returns the command for chaining. -// -// cmd := cli.NewCommand("build", "Build", "", run).WithArgs(cobra.ExactArgs(1)) -func WithArgs(cmd *Command, args cobra.PositionalArgs) *Command { - cmd.Args = args - return cmd -} - -// WithExample sets the Example field for a command. -// Returns the command for chaining. -func WithExample(cmd *Command, example string) *Command { - cmd.Example = example - return cmd -} - -// ExactArgs returns a PositionalArgs that accepts exactly N arguments. 
-func ExactArgs(n int) cobra.PositionalArgs { - return cobra.ExactArgs(n) -} - -// MinimumNArgs returns a PositionalArgs that accepts minimum N arguments. -func MinimumNArgs(n int) cobra.PositionalArgs { - return cobra.MinimumNArgs(n) -} - -// MaximumNArgs returns a PositionalArgs that accepts maximum N arguments. -func MaximumNArgs(n int) cobra.PositionalArgs { - return cobra.MaximumNArgs(n) -} - -// NoArgs returns a PositionalArgs that accepts no arguments. -func NoArgs() cobra.PositionalArgs { - return cobra.NoArgs -} - -// ArbitraryArgs returns a PositionalArgs that accepts any arguments. -func ArbitraryArgs() cobra.PositionalArgs { - return cobra.ArbitraryArgs -} diff --git a/pkg/cli/commands.go b/pkg/cli/commands.go deleted file mode 100644 index 20ea2da..0000000 --- a/pkg/cli/commands.go +++ /dev/null @@ -1,50 +0,0 @@ -// Package cli provides the CLI runtime and utilities. -package cli - -import ( - "sync" - - "github.com/spf13/cobra" -) - -// CommandRegistration is a function that adds commands to the root. -type CommandRegistration func(root *cobra.Command) - -var ( - registeredCommands []CommandRegistration - registeredCommandsMu sync.Mutex - commandsAttached bool -) - -// RegisterCommands registers a function that adds commands to the CLI. -// Call this in your package's init() to register commands. -// -// func init() { -// cli.RegisterCommands(AddCommands) -// } -// -// func AddCommands(root *cobra.Command) { -// root.AddCommand(myCmd) -// } -func RegisterCommands(fn CommandRegistration) { - registeredCommandsMu.Lock() - defer registeredCommandsMu.Unlock() - registeredCommands = append(registeredCommands, fn) - - // If commands already attached (CLI already running), attach immediately - if commandsAttached && instance != nil && instance.root != nil { - fn(instance.root) - } -} - -// attachRegisteredCommands calls all registered command functions. -// Called by Init() after creating the root command. 
-func attachRegisteredCommands(root *cobra.Command) { - registeredCommandsMu.Lock() - defer registeredCommandsMu.Unlock() - - for _, fn := range registeredCommands { - fn(root) - } - commandsAttached = true -} diff --git a/pkg/cli/daemon.go b/pkg/cli/daemon.go deleted file mode 100644 index 74cb0c7..0000000 --- a/pkg/cli/daemon.go +++ /dev/null @@ -1,445 +0,0 @@ -// Package cli provides the CLI runtime and utilities. -package cli - -import ( - "context" - "fmt" - "net" - "net/http" - "os" - "path/filepath" - "strconv" - "sync" - "syscall" - "time" - - "golang.org/x/term" -) - -// Mode represents the CLI execution mode. -type Mode int - -const ( - // ModeInteractive indicates TTY attached with coloured output. - ModeInteractive Mode = iota - // ModePipe indicates stdout is piped, colours disabled. - ModePipe - // ModeDaemon indicates headless execution, log-only output. - ModeDaemon -) - -// String returns the string representation of the Mode. -func (m Mode) String() string { - switch m { - case ModeInteractive: - return "interactive" - case ModePipe: - return "pipe" - case ModeDaemon: - return "daemon" - default: - return "unknown" - } -} - -// DetectMode determines the execution mode based on environment. -// Checks CORE_DAEMON env var first, then TTY status. -func DetectMode() Mode { - if os.Getenv("CORE_DAEMON") == "1" { - return ModeDaemon - } - if !IsTTY() { - return ModePipe - } - return ModeInteractive -} - -// IsTTY returns true if stdout is a terminal. -func IsTTY() bool { - return term.IsTerminal(int(os.Stdout.Fd())) -} - -// IsStdinTTY returns true if stdin is a terminal. -func IsStdinTTY() bool { - return term.IsTerminal(int(os.Stdin.Fd())) -} - -// IsStderrTTY returns true if stderr is a terminal. -func IsStderrTTY() bool { - return term.IsTerminal(int(os.Stderr.Fd())) -} - -// --- PID File Management --- - -// PIDFile manages a process ID file for single-instance enforcement. 
-type PIDFile struct { - path string - mu sync.Mutex -} - -// NewPIDFile creates a PID file manager. -func NewPIDFile(path string) *PIDFile { - return &PIDFile{path: path} -} - -// Acquire writes the current PID to the file. -// Returns error if another instance is running. -func (p *PIDFile) Acquire() error { - p.mu.Lock() - defer p.mu.Unlock() - - // Check if PID file exists - if data, err := os.ReadFile(p.path); err == nil { - pid, err := strconv.Atoi(string(data)) - if err == nil && pid > 0 { - // Check if process is still running - if process, err := os.FindProcess(pid); err == nil { - if err := process.Signal(syscall.Signal(0)); err == nil { - return fmt.Errorf("another instance is running (PID %d)", pid) - } - } - } - // Stale PID file, remove it - os.Remove(p.path) - } - - // Ensure directory exists - if dir := filepath.Dir(p.path); dir != "." { - if err := os.MkdirAll(dir, 0755); err != nil { - return fmt.Errorf("failed to create PID directory: %w", err) - } - } - - // Write current PID - pid := os.Getpid() - if err := os.WriteFile(p.path, []byte(strconv.Itoa(pid)), 0644); err != nil { - return fmt.Errorf("failed to write PID file: %w", err) - } - - return nil -} - -// Release removes the PID file. -func (p *PIDFile) Release() error { - p.mu.Lock() - defer p.mu.Unlock() - return os.Remove(p.path) -} - -// Path returns the PID file path. -func (p *PIDFile) Path() string { - return p.path -} - -// --- Health Check Server --- - -// HealthServer provides a minimal HTTP health check endpoint. -type HealthServer struct { - addr string - server *http.Server - listener net.Listener - mu sync.Mutex - ready bool - checks []HealthCheck -} - -// HealthCheck is a function that returns nil if healthy. -type HealthCheck func() error - -// NewHealthServer creates a health check server. -func NewHealthServer(addr string) *HealthServer { - return &HealthServer{ - addr: addr, - ready: true, - } -} - -// AddCheck registers a health check function. 
-func (h *HealthServer) AddCheck(check HealthCheck) { - h.mu.Lock() - h.checks = append(h.checks, check) - h.mu.Unlock() -} - -// SetReady sets the readiness status. -func (h *HealthServer) SetReady(ready bool) { - h.mu.Lock() - h.ready = ready - h.mu.Unlock() -} - -// Start begins serving health check endpoints. -// Endpoints: -// - /health - liveness probe (always 200 if server is up) -// - /ready - readiness probe (200 if ready, 503 if not) -func (h *HealthServer) Start() error { - mux := http.NewServeMux() - - mux.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) { - h.mu.Lock() - checks := h.checks - h.mu.Unlock() - - for _, check := range checks { - if err := check(); err != nil { - w.WriteHeader(http.StatusServiceUnavailable) - fmt.Fprintf(w, "unhealthy: %v\n", err) - return - } - } - - w.WriteHeader(http.StatusOK) - fmt.Fprintln(w, "ok") - }) - - mux.HandleFunc("/ready", func(w http.ResponseWriter, r *http.Request) { - h.mu.Lock() - ready := h.ready - h.mu.Unlock() - - if !ready { - w.WriteHeader(http.StatusServiceUnavailable) - fmt.Fprintln(w, "not ready") - return - } - - w.WriteHeader(http.StatusOK) - fmt.Fprintln(w, "ready") - }) - - listener, err := net.Listen("tcp", h.addr) - if err != nil { - return fmt.Errorf("failed to listen on %s: %w", h.addr, err) - } - - h.listener = listener - h.server = &http.Server{Handler: mux} - - go func() { - if err := h.server.Serve(listener); err != http.ErrServerClosed { - LogError(fmt.Sprintf("health server error: %v", err)) - } - }() - - return nil -} - -// Stop gracefully shuts down the health server. -func (h *HealthServer) Stop(ctx context.Context) error { - if h.server == nil { - return nil - } - return h.server.Shutdown(ctx) -} - -// Addr returns the actual address the server is listening on. -// Useful when using port 0 for dynamic port assignment. 
-func (h *HealthServer) Addr() string { - if h.listener != nil { - return h.listener.Addr().String() - } - return h.addr -} - -// --- Daemon Runner --- - -// DaemonOptions configures daemon mode execution. -type DaemonOptions struct { - // PIDFile path for single-instance enforcement. - // Leave empty to skip PID file management. - PIDFile string - - // ShutdownTimeout is the maximum time to wait for graceful shutdown. - // Default: 30 seconds. - ShutdownTimeout time.Duration - - // HealthAddr is the address for health check endpoints. - // Example: ":8080", "127.0.0.1:9000" - // Leave empty to disable health checks. - HealthAddr string - - // HealthChecks are additional health check functions. - HealthChecks []HealthCheck - - // OnReload is called when SIGHUP is received. - // Use for config reloading. Leave nil to ignore SIGHUP. - OnReload func() error -} - -// Daemon manages daemon lifecycle. -type Daemon struct { - opts DaemonOptions - pid *PIDFile - health *HealthServer - reload chan struct{} - running bool - mu sync.Mutex -} - -// NewDaemon creates a daemon runner with the given options. -func NewDaemon(opts DaemonOptions) *Daemon { - if opts.ShutdownTimeout == 0 { - opts.ShutdownTimeout = 30 * time.Second - } - - d := &Daemon{ - opts: opts, - reload: make(chan struct{}, 1), - } - - if opts.PIDFile != "" { - d.pid = NewPIDFile(opts.PIDFile) - } - - if opts.HealthAddr != "" { - d.health = NewHealthServer(opts.HealthAddr) - for _, check := range opts.HealthChecks { - d.health.AddCheck(check) - } - } - - return d -} - -// Start initialises the daemon (PID file, health server). -// Call this after cli.Init(). 
-func (d *Daemon) Start() error { - d.mu.Lock() - defer d.mu.Unlock() - - if d.running { - return fmt.Errorf("daemon already running") - } - - // Acquire PID file - if d.pid != nil { - if err := d.pid.Acquire(); err != nil { - return err - } - } - - // Start health server - if d.health != nil { - if err := d.health.Start(); err != nil { - if d.pid != nil { - d.pid.Release() - } - return err - } - } - - d.running = true - return nil -} - -// Run blocks until the context is cancelled or a signal is received. -// Handles graceful shutdown with the configured timeout. -func (d *Daemon) Run(ctx context.Context) error { - d.mu.Lock() - if !d.running { - d.mu.Unlock() - return fmt.Errorf("daemon not started - call Start() first") - } - d.mu.Unlock() - - // Wait for context cancellation (from signal handler) - <-ctx.Done() - - return d.Stop() -} - -// Stop performs graceful shutdown. -func (d *Daemon) Stop() error { - d.mu.Lock() - defer d.mu.Unlock() - - if !d.running { - return nil - } - - var errs []error - - // Create shutdown context with timeout - shutdownCtx, cancel := context.WithTimeout(context.Background(), d.opts.ShutdownTimeout) - defer cancel() - - // Stop health server - if d.health != nil { - d.health.SetReady(false) - if err := d.health.Stop(shutdownCtx); err != nil { - errs = append(errs, fmt.Errorf("health server: %w", err)) - } - } - - // Release PID file - if d.pid != nil { - if err := d.pid.Release(); err != nil && !os.IsNotExist(err) { - errs = append(errs, fmt.Errorf("pid file: %w", err)) - } - } - - d.running = false - - if len(errs) > 0 { - return fmt.Errorf("shutdown errors: %v", errs) - } - return nil -} - -// SetReady sets the daemon readiness status for health checks. -func (d *Daemon) SetReady(ready bool) { - if d.health != nil { - d.health.SetReady(ready) - } -} - -// HealthAddr returns the health server address, or empty if disabled. 
-func (d *Daemon) HealthAddr() string { - if d.health != nil { - return d.health.Addr() - } - return "" -} - -// --- Convenience Functions --- - -// Run blocks until context is cancelled or signal received. -// Simple helper for daemon mode without advanced features. -// -// cli.Init(cli.Options{AppName: "myapp"}) -// defer cli.Shutdown() -// cli.Run(cli.Context()) -func Run(ctx context.Context) error { - mustInit() - <-ctx.Done() - return ctx.Err() -} - -// RunWithTimeout wraps Run with a graceful shutdown timeout. -// The returned function should be deferred to replace cli.Shutdown(). -// -// cli.Init(cli.Options{AppName: "myapp"}) -// shutdown := cli.RunWithTimeout(30 * time.Second) -// defer shutdown() -// cli.Run(cli.Context()) -func RunWithTimeout(timeout time.Duration) func() { - return func() { - ctx, cancel := context.WithTimeout(context.Background(), timeout) - defer cancel() - - // Create done channel for shutdown completion - done := make(chan struct{}) - go func() { - Shutdown() - close(done) - }() - - select { - case <-done: - // Clean shutdown - case <-ctx.Done(): - // Timeout - force exit - LogWarn("shutdown timeout exceeded, forcing exit") - } - } -} diff --git a/pkg/cli/daemon_test.go b/pkg/cli/daemon_test.go deleted file mode 100644 index 214822b..0000000 --- a/pkg/cli/daemon_test.go +++ /dev/null @@ -1,255 +0,0 @@ -package cli - -import ( - "context" - "net/http" - "os" - "path/filepath" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestDetectMode(t *testing.T) { - t.Run("daemon mode from env", func(t *testing.T) { - t.Setenv("CORE_DAEMON", "1") - assert.Equal(t, ModeDaemon, DetectMode()) - }) - - t.Run("mode string", func(t *testing.T) { - assert.Equal(t, "interactive", ModeInteractive.String()) - assert.Equal(t, "pipe", ModePipe.String()) - assert.Equal(t, "daemon", ModeDaemon.String()) - assert.Equal(t, "unknown", Mode(99).String()) - }) -} - -func TestPIDFile(t *testing.T) { 
- t.Run("acquire and release", func(t *testing.T) { - tmpDir := t.TempDir() - pidPath := filepath.Join(tmpDir, "test.pid") - - pid := NewPIDFile(pidPath) - - // Acquire should succeed - err := pid.Acquire() - require.NoError(t, err) - - // File should exist with our PID - data, err := os.ReadFile(pidPath) - require.NoError(t, err) - assert.Contains(t, string(data), "") - - // Release should remove file - err = pid.Release() - require.NoError(t, err) - - _, err = os.Stat(pidPath) - assert.True(t, os.IsNotExist(err)) - }) - - t.Run("stale pid file", func(t *testing.T) { - tmpDir := t.TempDir() - pidPath := filepath.Join(tmpDir, "stale.pid") - - // Write a stale PID (non-existent process) - err := os.WriteFile(pidPath, []byte("999999999"), 0644) - require.NoError(t, err) - - pid := NewPIDFile(pidPath) - - // Should acquire successfully (stale PID removed) - err = pid.Acquire() - require.NoError(t, err) - - err = pid.Release() - require.NoError(t, err) - }) - - t.Run("creates parent directory", func(t *testing.T) { - tmpDir := t.TempDir() - pidPath := filepath.Join(tmpDir, "subdir", "nested", "test.pid") - - pid := NewPIDFile(pidPath) - - err := pid.Acquire() - require.NoError(t, err) - - _, err = os.Stat(pidPath) - require.NoError(t, err) - - err = pid.Release() - require.NoError(t, err) - }) - - t.Run("path getter", func(t *testing.T) { - pid := NewPIDFile("/tmp/test.pid") - assert.Equal(t, "/tmp/test.pid", pid.Path()) - }) -} - -func TestHealthServer(t *testing.T) { - t.Run("health and ready endpoints", func(t *testing.T) { - hs := NewHealthServer("127.0.0.1:0") // Random port - - err := hs.Start() - require.NoError(t, err) - defer hs.Stop(context.Background()) - - addr := hs.Addr() - require.NotEmpty(t, addr) - - // Health should be OK - resp, err := http.Get("http://" + addr + "/health") - require.NoError(t, err) - assert.Equal(t, http.StatusOK, resp.StatusCode) - resp.Body.Close() - - // Ready should be OK by default - resp, err = http.Get("http://" + addr + 
"/ready") - require.NoError(t, err) - assert.Equal(t, http.StatusOK, resp.StatusCode) - resp.Body.Close() - - // Set not ready - hs.SetReady(false) - - resp, err = http.Get("http://" + addr + "/ready") - require.NoError(t, err) - assert.Equal(t, http.StatusServiceUnavailable, resp.StatusCode) - resp.Body.Close() - }) - - t.Run("with health checks", func(t *testing.T) { - hs := NewHealthServer("127.0.0.1:0") - - healthy := true - hs.AddCheck(func() error { - if !healthy { - return assert.AnError - } - return nil - }) - - err := hs.Start() - require.NoError(t, err) - defer hs.Stop(context.Background()) - - addr := hs.Addr() - - // Should be healthy - resp, err := http.Get("http://" + addr + "/health") - require.NoError(t, err) - assert.Equal(t, http.StatusOK, resp.StatusCode) - resp.Body.Close() - - // Make unhealthy - healthy = false - - resp, err = http.Get("http://" + addr + "/health") - require.NoError(t, err) - assert.Equal(t, http.StatusServiceUnavailable, resp.StatusCode) - resp.Body.Close() - }) -} - -func TestDaemon(t *testing.T) { - t.Run("start and stop", func(t *testing.T) { - tmpDir := t.TempDir() - - d := NewDaemon(DaemonOptions{ - PIDFile: filepath.Join(tmpDir, "test.pid"), - HealthAddr: "127.0.0.1:0", - ShutdownTimeout: 5 * time.Second, - }) - - err := d.Start() - require.NoError(t, err) - - // Health server should be running - addr := d.HealthAddr() - require.NotEmpty(t, addr) - - resp, err := http.Get("http://" + addr + "/health") - require.NoError(t, err) - assert.Equal(t, http.StatusOK, resp.StatusCode) - resp.Body.Close() - - // Stop should succeed - err = d.Stop() - require.NoError(t, err) - - // PID file should be removed - _, err = os.Stat(filepath.Join(tmpDir, "test.pid")) - assert.True(t, os.IsNotExist(err)) - }) - - t.Run("double start fails", func(t *testing.T) { - d := NewDaemon(DaemonOptions{ - HealthAddr: "127.0.0.1:0", - }) - - err := d.Start() - require.NoError(t, err) - defer d.Stop() - - err = d.Start() - assert.Error(t, err) - 
assert.Contains(t, err.Error(), "already running") - }) - - t.Run("run without start fails", func(t *testing.T) { - d := NewDaemon(DaemonOptions{}) - - ctx, cancel := context.WithCancel(context.Background()) - cancel() - - err := d.Run(ctx) - assert.Error(t, err) - assert.Contains(t, err.Error(), "not started") - }) - - t.Run("set ready", func(t *testing.T) { - d := NewDaemon(DaemonOptions{ - HealthAddr: "127.0.0.1:0", - }) - - err := d.Start() - require.NoError(t, err) - defer d.Stop() - - addr := d.HealthAddr() - - // Initially ready - resp, _ := http.Get("http://" + addr + "/ready") - assert.Equal(t, http.StatusOK, resp.StatusCode) - resp.Body.Close() - - // Set not ready - d.SetReady(false) - - resp, _ = http.Get("http://" + addr + "/ready") - assert.Equal(t, http.StatusServiceUnavailable, resp.StatusCode) - resp.Body.Close() - }) - - t.Run("no health addr returns empty", func(t *testing.T) { - d := NewDaemon(DaemonOptions{}) - assert.Empty(t, d.HealthAddr()) - }) - - t.Run("default shutdown timeout", func(t *testing.T) { - d := NewDaemon(DaemonOptions{}) - assert.Equal(t, 30*time.Second, d.opts.ShutdownTimeout) - }) -} - -func TestRunWithTimeout(t *testing.T) { - t.Run("creates shutdown function", func(t *testing.T) { - // Just test that it returns a function - shutdown := RunWithTimeout(100 * time.Millisecond) - assert.NotNil(t, shutdown) - }) -} diff --git a/pkg/cli/errors.go b/pkg/cli/errors.go deleted file mode 100644 index 3e482a2..0000000 --- a/pkg/cli/errors.go +++ /dev/null @@ -1,124 +0,0 @@ -package cli - -import ( - "errors" - "fmt" - "os" - - "github.com/host-uk/core/pkg/i18n" -) - -// ───────────────────────────────────────────────────────────────────────────── -// Error Creation (replace fmt.Errorf) -// ───────────────────────────────────────────────────────────────────────────── - -// Err creates a new error from a format string. -// This is a direct replacement for fmt.Errorf. 
-func Err(format string, args ...any) error { - return fmt.Errorf(format, args...) -} - -// Wrap wraps an error with a message. -// Returns nil if err is nil. -// -// return cli.Wrap(err, "load config") // "load config: " -func Wrap(err error, msg string) error { - if err == nil { - return nil - } - return fmt.Errorf("%s: %w", msg, err) -} - -// WrapVerb wraps an error using i18n grammar for "Failed to verb subject". -// Uses the i18n.ActionFailed function for proper grammar composition. -// Returns nil if err is nil. -// -// return cli.WrapVerb(err, "load", "config") // "Failed to load config: " -func WrapVerb(err error, verb, subject string) error { - if err == nil { - return nil - } - msg := i18n.ActionFailed(verb, subject) - return fmt.Errorf("%s: %w", msg, err) -} - -// WrapAction wraps an error using i18n grammar for "Failed to verb". -// Uses the i18n.ActionFailed function for proper grammar composition. -// Returns nil if err is nil. -// -// return cli.WrapAction(err, "connect") // "Failed to connect: " -func WrapAction(err error, verb string) error { - if err == nil { - return nil - } - msg := i18n.ActionFailed(verb, "") - return fmt.Errorf("%s: %w", msg, err) -} - -// ───────────────────────────────────────────────────────────────────────────── -// Error Helpers -// ───────────────────────────────────────────────────────────────────────────── - -// Is reports whether any error in err's tree matches target. -// This is a re-export of errors.Is for convenience. -func Is(err, target error) bool { - return errors.Is(err, target) -} - -// As finds the first error in err's tree that matches target. -// This is a re-export of errors.As for convenience. -func As(err error, target any) bool { - return errors.As(err, target) -} - -// Join returns an error that wraps the given errors. -// This is a re-export of errors.Join for convenience. -func Join(errs ...error) error { - return errors.Join(errs...) 
-} - -// ───────────────────────────────────────────────────────────────────────────── -// Fatal Functions (print and exit) -// ───────────────────────────────────────────────────────────────────────────── - -// Fatal prints an error message and exits with code 1. -func Fatal(err error) { - if err != nil { - fmt.Println(ErrorStyle.Render(Glyph(":cross:") + " " + err.Error())) - os.Exit(1) - } -} - -// Fatalf prints a formatted error message and exits with code 1. -func Fatalf(format string, args ...any) { - msg := fmt.Sprintf(format, args...) - fmt.Println(ErrorStyle.Render(Glyph(":cross:") + " " + msg)) - os.Exit(1) -} - -// FatalWrap prints a wrapped error message and exits with code 1. -// Does nothing if err is nil. -// -// cli.FatalWrap(err, "load config") // Prints "✗ load config: " and exits -func FatalWrap(err error, msg string) { - if err == nil { - return - } - fullMsg := fmt.Sprintf("%s: %v", msg, err) - fmt.Println(ErrorStyle.Render(Glyph(":cross:") + " " + fullMsg)) - os.Exit(1) -} - -// FatalWrapVerb prints a wrapped error using i18n grammar and exits with code 1. -// Does nothing if err is nil. -// -// cli.FatalWrapVerb(err, "load", "config") // Prints "✗ Failed to load config: " and exits -func FatalWrapVerb(err error, verb, subject string) { - if err == nil { - return - } - msg := i18n.ActionFailed(verb, subject) - fullMsg := fmt.Sprintf("%s: %v", msg, err) - fmt.Println(ErrorStyle.Render(Glyph(":cross:") + " " + fullMsg)) - os.Exit(1) -} diff --git a/pkg/cli/glyph.go b/pkg/cli/glyph.go deleted file mode 100644 index 28ca5fd..0000000 --- a/pkg/cli/glyph.go +++ /dev/null @@ -1,89 +0,0 @@ -package cli - -import ( - "bytes" - "unicode" -) - -// GlyphTheme defines which symbols to use. -type GlyphTheme int - -const ( - // ThemeUnicode uses standard Unicode symbols. - ThemeUnicode GlyphTheme = iota - // ThemeEmoji uses Emoji symbols. - ThemeEmoji - // ThemeASCII uses ASCII fallback symbols. 
// GlyphTheme selects which symbol set Glyph resolves shortcodes against.
type GlyphTheme int

const (
	// ThemeUnicode uses standard Unicode symbols.
	ThemeUnicode GlyphTheme = iota
	// ThemeEmoji uses Emoji symbols.
	ThemeEmoji
	// ThemeASCII uses ASCII fallback symbols.
	ThemeASCII
)

// currentTheme is the process-wide active theme (not goroutine-safe).
var currentTheme = ThemeUnicode

// UseUnicode switches the glyph theme to Unicode.
func UseUnicode() { currentTheme = ThemeUnicode }

// UseEmoji switches the glyph theme to Emoji.
func UseEmoji() { currentTheme = ThemeEmoji }

// UseASCII switches the glyph theme to ASCII.
func UseASCII() { currentTheme = ThemeASCII }

// glyphMap returns the shortcode lookup table for the active theme.
func glyphMap() map[string]string {
	switch currentTheme {
	case ThemeEmoji:
		return glyphMapEmoji
	case ThemeASCII:
		return glyphMapASCII
	}
	return glyphMapUnicode
}

// Glyph resolves a shortcode (e.g. ":check:") to its themed symbol.
// Unknown codes are returned unchanged.
func Glyph(code string) string {
	if sym, ok := glyphMap()[code]; ok {
		return sym
	}
	return code
}

// compileGlyphs expands every ":code:" shortcode in x to its themed
// symbol, leaving all other text untouched.
func compileGlyphs(x string) string {
	if x == "" {
		return ""
	}
	src := bytes.NewBufferString(x)
	var dst bytes.Buffer
	for {
		ch, _, err := src.ReadRune()
		if err != nil {
			return dst.String()
		}
		if ch == ':' {
			dst.WriteString(replaceGlyph(src))
			continue
		}
		dst.WriteRune(ch)
	}
}

// replaceGlyph consumes runes following an opening ':' until it can
// decide whether they form a shortcode. A complete ":code:" is resolved
// via Glyph; whitespace or EOF aborts and the text passes through as-is.
func replaceGlyph(src *bytes.Buffer) string {
	pending := bytes.NewBufferString(":")
	for {
		ch, _, err := src.ReadRune()
		if err != nil {
			return pending.String()
		}
		if ch == ':' && pending.Len() == 1 {
			// "::" — emit the first colon literally, restart on the second.
			return pending.String() + replaceGlyph(src)
		}
		pending.WriteRune(ch)
		if unicode.IsSpace(ch) {
			return pending.String()
		}
		if ch == ':' {
			return Glyph(pending.String())
		}
	}
}

// glyphMapUnicode maps shortcodes to standard Unicode symbols.
var glyphMapUnicode = map[string]string{
	":check:": "✓", ":cross:": "✗", ":warn:": "⚠", ":info:": "ℹ",
	":question:": "?", ":skip:": "○", ":dot:": "●", ":circle:": "◯",
	":arrow_right:": "→", ":arrow_left:": "←", ":arrow_up:": "↑", ":arrow_down:": "↓",
	":pointer:": "▶", ":bullet:": "•", ":dash:": "─", ":pipe:": "│",
	":corner:": "└", ":tee:": "├", ":pending:": "…", ":spinner:": "⠋",
}

// glyphMapEmoji maps shortcodes to Emoji symbols.
var glyphMapEmoji = map[string]string{
	":check:": "✅", ":cross:": "❌", ":warn:": "⚠️", ":info:": "ℹ️",
	":question:": "❓", ":skip:": "⏭️", ":dot:": "🔵", ":circle:": "⚪",
	":arrow_right:": "➡️", ":arrow_left:": "⬅️", ":arrow_up:": "⬆️", ":arrow_down:": "⬇️",
	":pointer:": "▶️", ":bullet:": "•", ":dash:": "─", ":pipe:": "│",
	":corner:": "└", ":tee:": "├", ":pending:": "⏳", ":spinner:": "🔄",
}

// glyphMapASCII maps shortcodes to plain-ASCII fallbacks.
var glyphMapASCII = map[string]string{
	":check:": "[OK]", ":cross:": "[FAIL]", ":warn:": "[WARN]", ":info:": "[INFO]",
	":question:": "[?]", ":skip:": "[SKIP]", ":dot:": "[*]", ":circle:": "[ ]",
	":arrow_right:": "->", ":arrow_left:": "<-", ":arrow_up:": "^", ":arrow_down:": "v",
	":pointer:": ">", ":bullet:": "*", ":dash:": "-", ":pipe:": "|",
	":corner:": "`", ":tee:": "+", ":pending:": "...", ":spinner:": "-",
}
-type I18nService struct { - *framework.ServiceRuntime[I18nOptions] - svc *i18n.Service - - // Collect mode state - missingKeys []i18n.MissingKey - missingKeysMu sync.Mutex -} - -// I18nOptions configures the i18n service. -type I18nOptions struct { - // Language overrides auto-detection (e.g., "en-GB", "de") - Language string - // Mode sets the translation mode (Normal, Strict, Collect) - Mode i18n.Mode -} - -// NewI18nService creates an i18n service factory. -func NewI18nService(opts I18nOptions) func(*framework.Core) (any, error) { - return func(c *framework.Core) (any, error) { - svc, err := i18n.New() - if err != nil { - return nil, err - } - - if opts.Language != "" { - svc.SetLanguage(opts.Language) - } - - // Set mode if specified - svc.SetMode(opts.Mode) - - // Set as global default so i18n.T() works everywhere - i18n.SetDefault(svc) - - return &I18nService{ - ServiceRuntime: framework.NewServiceRuntime(c, opts), - svc: svc, - missingKeys: make([]i18n.MissingKey, 0), - }, nil - } -} - -// OnStartup initialises the i18n service. -func (s *I18nService) OnStartup(ctx context.Context) error { - s.Core().RegisterQuery(s.handleQuery) - - // Register action handler for collect mode - if s.svc.Mode() == i18n.ModeCollect { - i18n.OnMissingKey(s.handleMissingKey) - } - - return nil -} - -// handleMissingKey accumulates missing keys in collect mode. -func (s *I18nService) handleMissingKey(mk i18n.MissingKey) { - s.missingKeysMu.Lock() - defer s.missingKeysMu.Unlock() - s.missingKeys = append(s.missingKeys, mk) -} - -// MissingKeys returns all missing keys collected in collect mode. -// Call this at the end of a QA session to report missing translations. -func (s *I18nService) MissingKeys() []i18n.MissingKey { - s.missingKeysMu.Lock() - defer s.missingKeysMu.Unlock() - result := make([]i18n.MissingKey, len(s.missingKeys)) - copy(result, s.missingKeys) - return result -} - -// ClearMissingKeys resets the collected missing keys. 
-func (s *I18nService) ClearMissingKeys() { - s.missingKeysMu.Lock() - defer s.missingKeysMu.Unlock() - s.missingKeys = s.missingKeys[:0] -} - -// SetMode changes the translation mode. -func (s *I18nService) SetMode(mode i18n.Mode) { - s.svc.SetMode(mode) - - // Update action handler registration - if mode == i18n.ModeCollect { - i18n.OnMissingKey(s.handleMissingKey) - } else { - i18n.OnMissingKey(nil) - } -} - -// Mode returns the current translation mode. -func (s *I18nService) Mode() i18n.Mode { - return s.svc.Mode() -} - -// Queries for i18n service - -// QueryTranslate requests a translation. -type QueryTranslate struct { - Key string - Args map[string]any -} - -func (s *I18nService) handleQuery(c *framework.Core, q framework.Query) (any, bool, error) { - switch m := q.(type) { - case QueryTranslate: - return s.svc.T(m.Key, m.Args), true, nil - } - return nil, false, nil -} - -// T translates a key with optional arguments. -func (s *I18nService) T(key string, args ...map[string]any) string { - if len(args) > 0 { - return s.svc.T(key, args[0]) - } - return s.svc.T(key) -} - -// SetLanguage changes the current language. -func (s *I18nService) SetLanguage(lang string) { - s.svc.SetLanguage(lang) -} - -// Language returns the current language. -func (s *I18nService) Language() string { - return s.svc.Language() -} - -// AvailableLanguages returns all available languages. -func (s *I18nService) AvailableLanguages() []string { - return s.svc.AvailableLanguages() -} - -// --- Package-level convenience --- - -// T translates a key using the CLI's i18n service. -// Falls back to the global i18n.T if CLI not initialised. 
-func T(key string, args ...map[string]any) string { - if instance == nil { - // CLI not initialised, use global i18n - if len(args) > 0 { - return i18n.T(key, args[0]) - } - return i18n.T(key) - } - - svc, err := framework.ServiceFor[*I18nService](instance.core, "i18n") - if err != nil { - // i18n service not registered, use global - if len(args) > 0 { - return i18n.T(key, args[0]) - } - return i18n.T(key) - } - - return svc.T(key, args...) -} diff --git a/pkg/cli/layout.go b/pkg/cli/layout.go deleted file mode 100644 index d4feb57..0000000 --- a/pkg/cli/layout.go +++ /dev/null @@ -1,147 +0,0 @@ -package cli - -import "fmt" - -// Region represents one of the 5 HLCRF regions. -type Region rune - -const ( - // RegionHeader is the top region of the layout. - RegionHeader Region = 'H' - // RegionLeft is the left sidebar region. - RegionLeft Region = 'L' - // RegionContent is the main content region. - RegionContent Region = 'C' - // RegionRight is the right sidebar region. - RegionRight Region = 'R' - // RegionFooter is the bottom region of the layout. - RegionFooter Region = 'F' -) - -// Composite represents an HLCRF layout node. -type Composite struct { - variant string - path string - regions map[Region]*Slot - parent *Composite -} - -// Slot holds content for a region. -type Slot struct { - region Region - path string - blocks []Renderable - child *Composite -} - -// Renderable is anything that can be rendered to terminal. -type Renderable interface { - Render() string -} - -// StringBlock is a simple string that implements Renderable. -type StringBlock string - -// Render returns the string content. -func (s StringBlock) Render() string { return string(s) } - -// Layout creates a new layout from a variant string. -func Layout(variant string) *Composite { - c, err := ParseVariant(variant) - if err != nil { - return &Composite{variant: variant, regions: make(map[Region]*Slot)} - } - return c -} - -// ParseVariant parses a variant string like "H[LC]C[HCF]F". 
-func ParseVariant(variant string) (*Composite, error) { - c := &Composite{ - variant: variant, - path: "", - regions: make(map[Region]*Slot), - } - - i := 0 - for i < len(variant) { - r := Region(variant[i]) - if !isValidRegion(r) { - return nil, fmt.Errorf("invalid region: %c", r) - } - - slot := &Slot{region: r, path: string(r)} - c.regions[r] = slot - i++ - - if i < len(variant) && variant[i] == '[' { - end := findMatchingBracket(variant, i) - if end == -1 { - return nil, fmt.Errorf("unmatched bracket at %d", i) - } - nested, err := ParseVariant(variant[i+1 : end]) - if err != nil { - return nil, err - } - nested.path = string(r) + "-" - nested.parent = c - slot.child = nested - i = end + 1 - } - } - return c, nil -} - -func isValidRegion(r Region) bool { - return r == 'H' || r == 'L' || r == 'C' || r == 'R' || r == 'F' -} - -func findMatchingBracket(s string, start int) int { - depth := 0 - for i := start; i < len(s); i++ { - if s[i] == '[' { - depth++ - } else if s[i] == ']' { - depth-- - if depth == 0 { - return i - } - } - } - return -1 -} - -// H adds content to Header region. -func (c *Composite) H(items ...any) *Composite { c.addToRegion(RegionHeader, items...); return c } - -// L adds content to Left region. -func (c *Composite) L(items ...any) *Composite { c.addToRegion(RegionLeft, items...); return c } - -// C adds content to Content region. -func (c *Composite) C(items ...any) *Composite { c.addToRegion(RegionContent, items...); return c } - -// R adds content to Right region. -func (c *Composite) R(items ...any) *Composite { c.addToRegion(RegionRight, items...); return c } - -// F adds content to Footer region. 
-func (c *Composite) F(items ...any) *Composite { c.addToRegion(RegionFooter, items...); return c } - -func (c *Composite) addToRegion(r Region, items ...any) { - slot, ok := c.regions[r] - if !ok { - return - } - for _, item := range items { - slot.blocks = append(slot.blocks, toRenderable(item)) - } -} - -func toRenderable(item any) Renderable { - switch v := item.(type) { - case Renderable: - return v - case string: - return StringBlock(v) - default: - return StringBlock(fmt.Sprint(v)) - } -} \ No newline at end of file diff --git a/pkg/cli/layout_test.go b/pkg/cli/layout_test.go deleted file mode 100644 index a49504e..0000000 --- a/pkg/cli/layout_test.go +++ /dev/null @@ -1,25 +0,0 @@ -package cli - -import "testing" - -func TestParseVariant(t *testing.T) { - c, err := ParseVariant("H[LC]F") - if err != nil { - t.Fatalf("Parse failed: %v", err) - } - if _, ok := c.regions[RegionHeader]; !ok { - t.Error("Expected Header region") - } - if _, ok := c.regions[RegionFooter]; !ok { - t.Error("Expected Footer region") - } - - hSlot := c.regions[RegionHeader] - if hSlot.child == nil { - t.Error("Header should have child layout") - } else { - if _, ok := hSlot.child.regions[RegionLeft]; !ok { - t.Error("Child should have Left region") - } - } -} diff --git a/pkg/cli/log.go b/pkg/cli/log.go deleted file mode 100644 index 8b81dd7..0000000 --- a/pkg/cli/log.go +++ /dev/null @@ -1,90 +0,0 @@ -package cli - -import ( - "github.com/host-uk/core/pkg/framework" - "github.com/host-uk/core/pkg/log" -) - -// LogLevel aliases for backwards compatibility. -type LogLevel = log.Level - -const ( - LogLevelQuiet = log.LevelQuiet - LogLevelError = log.LevelError - LogLevelWarn = log.LevelWarn - LogLevelInfo = log.LevelInfo - LogLevelDebug = log.LevelDebug -) - -// LogService wraps log.Service with CLI styling. -type LogService struct { - *log.Service -} - -// LogOptions configures the log service. 
-type LogOptions = log.Options - -// NewLogService creates a log service factory with CLI styling. -func NewLogService(opts LogOptions) func(*framework.Core) (any, error) { - return func(c *framework.Core) (any, error) { - // Create the underlying service - factory := log.NewService(opts) - svc, err := factory(c) - if err != nil { - return nil, err - } - - logSvc := svc.(*log.Service) - - // Apply CLI styles - logSvc.StyleTimestamp = func(s string) string { return DimStyle.Render(s) } - logSvc.StyleDebug = func(s string) string { return DimStyle.Render(s) } - logSvc.StyleInfo = func(s string) string { return InfoStyle.Render(s) } - logSvc.StyleWarn = func(s string) string { return WarningStyle.Render(s) } - logSvc.StyleError = func(s string) string { return ErrorStyle.Render(s) } - - return &LogService{Service: logSvc}, nil - } -} - -// --- Package-level convenience --- - -// Log returns the CLI's log service, or nil if not available. -func Log() *LogService { - if instance == nil { - return nil - } - svc, err := framework.ServiceFor[*LogService](instance.core, "log") - if err != nil { - return nil - } - return svc -} - -// LogDebug logs a debug message if log service is available. -func LogDebug(msg string) { - if l := Log(); l != nil { - l.Debug(msg) - } -} - -// LogInfo logs an info message if log service is available. -func LogInfo(msg string) { - if l := Log(); l != nil { - l.Info(msg) - } -} - -// LogWarn logs a warning message if log service is available. -func LogWarn(msg string) { - if l := Log(); l != nil { - l.Warn(msg) - } -} - -// LogError logs an error message if log service is available. -func LogError(msg string) { - if l := Log(); l != nil { - l.Error(msg) - } -} diff --git a/pkg/cli/output.go b/pkg/cli/output.go deleted file mode 100644 index b785e96..0000000 --- a/pkg/cli/output.go +++ /dev/null @@ -1,166 +0,0 @@ -package cli - -import ( - "fmt" - "strings" - - "github.com/host-uk/core/pkg/i18n" -) - -// Blank prints an empty line. 
-func Blank() { - fmt.Println() -} - -// Echo translates a key via i18n.T and prints with newline. -// No automatic styling - use Success/Error/Warn/Info for styled output. -func Echo(key string, args ...any) { - fmt.Println(i18n.T(key, args...)) -} - -// Print outputs formatted text (no newline). -// Glyph shortcodes like :check: are converted. -func Print(format string, args ...any) { - fmt.Print(compileGlyphs(fmt.Sprintf(format, args...))) -} - -// Println outputs formatted text with newline. -// Glyph shortcodes like :check: are converted. -func Println(format string, args ...any) { - fmt.Println(compileGlyphs(fmt.Sprintf(format, args...))) -} - -// Text prints arguments like fmt.Println, but handling glyphs. -func Text(args ...any) { - fmt.Println(compileGlyphs(fmt.Sprint(args...))) -} - -// Success prints a success message with checkmark (green). -func Success(msg string) { - fmt.Println(SuccessStyle.Render(Glyph(":check:") + " " + msg)) -} - -// Successf prints a formatted success message. -func Successf(format string, args ...any) { - Success(fmt.Sprintf(format, args...)) -} - -// Error prints an error message with cross (red). -func Error(msg string) { - fmt.Println(ErrorStyle.Render(Glyph(":cross:") + " " + msg)) -} - -// Errorf prints a formatted error message. -func Errorf(format string, args ...any) { - Error(fmt.Sprintf(format, args...)) -} - -// Warn prints a warning message with warning symbol (amber). -func Warn(msg string) { - fmt.Println(WarningStyle.Render(Glyph(":warn:") + " " + msg)) -} - -// Warnf prints a formatted warning message. -func Warnf(format string, args ...any) { - Warn(fmt.Sprintf(format, args...)) -} - -// Info prints an info message with info symbol (blue). -func Info(msg string) { - fmt.Println(InfoStyle.Render(Glyph(":info:") + " " + msg)) -} - -// Infof prints a formatted info message. -func Infof(format string, args ...any) { - Info(fmt.Sprintf(format, args...)) -} - -// Dim prints dimmed text. 
-func Dim(msg string) { - fmt.Println(DimStyle.Render(msg)) -} - -// Progress prints a progress indicator that overwrites the current line. -// Uses i18n.Progress for gerund form ("Checking..."). -func Progress(verb string, current, total int, item ...string) { - msg := i18n.Progress(verb) - if len(item) > 0 && item[0] != "" { - fmt.Printf("\033[2K\r%s %d/%d %s", DimStyle.Render(msg), current, total, item[0]) - } else { - fmt.Printf("\033[2K\r%s %d/%d", DimStyle.Render(msg), current, total) - } -} - -// ProgressDone clears the progress line. -func ProgressDone() { - fmt.Print("\033[2K\r") -} - -// Label prints a "Label: value" line. -func Label(word, value string) { - fmt.Printf("%s %s\n", KeyStyle.Render(i18n.Label(word)), value) -} - -// Scanln reads from stdin. -func Scanln(a ...any) (int, error) { - return fmt.Scanln(a...) -} - -// Task prints a task header: "[label] message" -// -// cli.Task("php", "Running tests...") // [php] Running tests... -// cli.Task("go", i18n.Progress("build")) // [go] Building... -func Task(label, message string) { - fmt.Printf("%s %s\n\n", DimStyle.Render("["+label+"]"), message) -} - -// Section prints a section header: "── SECTION ──" -// -// cli.Section("audit") // ── AUDIT ── -func Section(name string) { - header := "── " + strings.ToUpper(name) + " ──" - fmt.Println(AccentStyle.Render(header)) -} - -// Hint prints a labelled hint: "label: message" -// -// cli.Hint("install", "composer require vimeo/psalm") -// cli.Hint("fix", "core php fmt --fix") -func Hint(label, message string) { - fmt.Printf(" %s %s\n", DimStyle.Render(label+":"), message) -} - -// Severity prints a severity-styled message. 
-// -// cli.Severity("critical", "SQL injection") // red, bold -// cli.Severity("high", "XSS vulnerability") // orange -// cli.Severity("medium", "Missing CSRF") // amber -// cli.Severity("low", "Debug enabled") // gray -func Severity(level, message string) { - var style *AnsiStyle - switch strings.ToLower(level) { - case "critical": - style = NewStyle().Bold().Foreground(ColourRed500) - case "high": - style = NewStyle().Bold().Foreground(ColourOrange500) - case "medium": - style = NewStyle().Foreground(ColourAmber500) - case "low": - style = NewStyle().Foreground(ColourGray500) - default: - style = DimStyle - } - fmt.Printf(" %s %s\n", style.Render("["+level+"]"), message) -} - -// Result prints a result line: "✓ message" or "✗ message" -// -// cli.Result(passed, "All tests passed") -// cli.Result(false, "3 tests failed") -func Result(passed bool, message string) { - if passed { - Success(message) - } else { - Error(message) - } -} \ No newline at end of file diff --git a/pkg/cli/output_test.go b/pkg/cli/output_test.go deleted file mode 100644 index 25f1cfe..0000000 --- a/pkg/cli/output_test.go +++ /dev/null @@ -1,98 +0,0 @@ -package cli - -import ( - "bytes" - "io" - "os" - "testing" -) - -func captureOutput(f func()) string { - old := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - f() - - w.Close() - os.Stdout = old - - var buf bytes.Buffer - io.Copy(&buf, r) - return buf.String() -} - -func TestSemanticOutput(t *testing.T) { - UseASCII() - - // Test Success - out := captureOutput(func() { - Success("done") - }) - if out == "" { - t.Error("Success output empty") - } - - // Test Error - out = captureOutput(func() { - Error("fail") - }) - if out == "" { - t.Error("Error output empty") - } - - // Test Warn - out = captureOutput(func() { - Warn("warn") - }) - if out == "" { - t.Error("Warn output empty") - } - - // Test Info - out = captureOutput(func() { - Info("info") - }) - if out == "" { - t.Error("Info output empty") - } - - // Test Task - out = 
// stdin is a shared buffered reader over os.Stdin used by all prompts.
var stdin = bufio.NewReader(os.Stdin)

// Prompt asks for a line of text input, returning defaultVal when the
// user submits an empty line.
func Prompt(label, defaultVal string) (string, error) {
	if defaultVal != "" {
		fmt.Printf("%s [%s]: ", label, defaultVal)
	} else {
		fmt.Printf("%s: ", label)
	}

	line, err := stdin.ReadString('\n')
	if err != nil {
		return "", err
	}
	if line = strings.TrimSpace(line); line != "" {
		return line, nil
	}
	return defaultVal, nil
}

// Select presents numbered options and returns the selected value.
// An out-of-range or non-numeric answer yields an error.
func Select(label string, options []string) (string, error) {
	fmt.Println(label)
	for i, opt := range options {
		fmt.Printf(" %d. %s\n", i+1, opt)
	}
	fmt.Printf("Choose [1-%d]: ", len(options))

	line, err := stdin.ReadString('\n')
	if err != nil {
		return "", err
	}
	n, err := strconv.Atoi(strings.TrimSpace(line))
	if err != nil || n < 1 || n > len(options) {
		return "", fmt.Errorf("invalid selection")
	}
	return options[n-1], nil
}

// MultiSelect presents numbered options and reads space-separated
// picks; invalid or out-of-range numbers are silently skipped.
func MultiSelect(label string, options []string) ([]string, error) {
	fmt.Println(label)
	for i, opt := range options {
		fmt.Printf(" %d. %s\n", i+1, opt)
	}
	fmt.Printf("Choose (space-separated) [1-%d]: ", len(options))

	line, err := stdin.ReadString('\n')
	if err != nil {
		return nil, err
	}
	var picked []string
	for _, field := range strings.Fields(line) {
		n, convErr := strconv.Atoi(field)
		if convErr != nil || n < 1 || n > len(options) {
			continue
		}
		picked = append(picked, options[n-1])
	}
	return picked, nil
}

// RenderStyle controls how layouts are rendered.
type RenderStyle int

const (
	RenderFlat   RenderStyle = iota // No borders
	RenderSimple                    // --- separators
	RenderBoxed                     // Unicode box drawing
)

// currentRenderStyle is the process-wide active render style.
var currentRenderStyle = RenderFlat

// UseRenderFlat selects borderless rendering.
func UseRenderFlat() { currentRenderStyle = RenderFlat }

// UseRenderSimple selects separator-line rendering.
func UseRenderSimple() { currentRenderStyle = RenderSimple }

// UseRenderBoxed selects box-drawing rendering.
func UseRenderBoxed() { currentRenderStyle = RenderBoxed }
-func (c *Composite) String() string { - var sb strings.Builder - c.renderTo(&sb, 0) - return sb.String() -} - -func (c *Composite) renderTo(sb *strings.Builder, depth int) { - order := []Region{RegionHeader, RegionLeft, RegionContent, RegionRight, RegionFooter} - - var active []Region - for _, r := range order { - if slot, ok := c.regions[r]; ok { - if len(slot.blocks) > 0 || slot.child != nil { - active = append(active, r) - } - } - } - - for i, r := range active { - slot := c.regions[r] - if i > 0 && currentRenderStyle != RenderFlat { - c.renderSeparator(sb, depth) - } - c.renderSlot(sb, slot, depth) - } -} - -func (c *Composite) renderSeparator(sb *strings.Builder, depth int) { - indent := strings.Repeat(" ", depth) - switch currentRenderStyle { - case RenderBoxed: - sb.WriteString(indent + "├" + strings.Repeat("─", 40) + "┤\n") - case RenderSimple: - sb.WriteString(indent + strings.Repeat("─", 40) + "\n") - } -} - -func (c *Composite) renderSlot(sb *strings.Builder, slot *Slot, depth int) { - indent := strings.Repeat(" ", depth) - for _, block := range slot.blocks { - for _, line := range strings.Split(block.Render(), "\n") { - if line != "" { - sb.WriteString(indent + line + "\n") - } - } - } - if slot.child != nil { - slot.child.renderTo(sb, depth+1) - } -} diff --git a/pkg/cli/runtime.go b/pkg/cli/runtime.go deleted file mode 100644 index 1e14e71..0000000 --- a/pkg/cli/runtime.go +++ /dev/null @@ -1,217 +0,0 @@ -// Package cli provides the CLI runtime and utilities. -// -// The CLI uses the Core framework for its own runtime. Usage is simple: -// -// cli.Init(cli.Options{AppName: "core"}) -// defer cli.Shutdown() -// -// cli.Success("Done!") -// cli.Error("Failed") -// if cli.Confirm("Proceed?") { ... 
} -// -// // When you need the Core instance -// c := cli.Core() -package cli - -import ( - "context" - "fmt" - "os" - "os/signal" - "sync" - "syscall" - - "github.com/host-uk/core/pkg/framework" - "github.com/spf13/cobra" -) - -var ( - instance *runtime - once sync.Once -) - -// runtime is the CLI's internal Core runtime. -type runtime struct { - core *framework.Core - root *cobra.Command - ctx context.Context - cancel context.CancelFunc -} - -// Options configures the CLI runtime. -type Options struct { - AppName string - Version string - Services []framework.Option // Additional services to register - - // OnReload is called when SIGHUP is received (daemon mode). - // Use for configuration reloading. Leave nil to ignore SIGHUP. - OnReload func() error -} - -// Init initialises the global CLI runtime. -// Call this once at startup (typically in main.go or cmd.Execute). -func Init(opts Options) error { - var initErr error - once.Do(func() { - ctx, cancel := context.WithCancel(context.Background()) - - // Create root command - rootCmd := &cobra.Command{ - Use: opts.AppName, - Version: opts.Version, - } - - // Attach all registered commands - attachRegisteredCommands(rootCmd) - - // Build signal service options - var signalOpts []SignalOption - if opts.OnReload != nil { - signalOpts = append(signalOpts, WithReloadHandler(opts.OnReload)) - } - - // Build options: app, signal service + any additional services - coreOpts := []framework.Option{ - framework.WithApp(rootCmd), - framework.WithName("signal", newSignalService(cancel, signalOpts...)), - } - coreOpts = append(coreOpts, opts.Services...) - coreOpts = append(coreOpts, framework.WithServiceLock()) - - c, err := framework.New(coreOpts...) 
- if err != nil { - initErr = err - cancel() - return - } - - instance = &runtime{ - core: c, - root: rootCmd, - ctx: ctx, - cancel: cancel, - } - - if err := c.ServiceStartup(ctx, nil); err != nil { - initErr = err - return - } - }) - return initErr -} - -func mustInit() { - if instance == nil { - panic("cli not initialised - call cli.Init() first") - } -} - -// --- Core Access --- - -// Core returns the CLI's framework Core instance. -func Core() *framework.Core { - mustInit() - return instance.core -} - -// RootCmd returns the CLI's root cobra command. -func RootCmd() *cobra.Command { - mustInit() - return instance.root -} - -// Execute runs the CLI root command. -// Returns an error if the command fails. -func Execute() error { - mustInit() - return instance.root.Execute() -} - -// Context returns the CLI's root context. -// Cancelled on SIGINT/SIGTERM. -func Context() context.Context { - mustInit() - return instance.ctx -} - -// Shutdown gracefully shuts down the CLI. -func Shutdown() { - if instance == nil { - return - } - instance.cancel() - instance.core.ServiceShutdown(instance.ctx) -} - - - -// --- Signal Service (internal) --- - -type signalService struct { - cancel context.CancelFunc - sigChan chan os.Signal - onReload func() error -} - -// SignalOption configures signal handling. -type SignalOption func(*signalService) - -// WithReloadHandler sets a callback for SIGHUP. 
-func WithReloadHandler(fn func() error) SignalOption { - return func(s *signalService) { - s.onReload = fn - } -} - -func newSignalService(cancel context.CancelFunc, opts ...SignalOption) func(*framework.Core) (any, error) { - return func(c *framework.Core) (any, error) { - svc := &signalService{ - cancel: cancel, - sigChan: make(chan os.Signal, 1), - } - for _, opt := range opts { - opt(svc) - } - return svc, nil - } -} - -func (s *signalService) OnStartup(ctx context.Context) error { - signals := []os.Signal{syscall.SIGINT, syscall.SIGTERM} - if s.onReload != nil { - signals = append(signals, syscall.SIGHUP) - } - signal.Notify(s.sigChan, signals...) - - go func() { - for { - select { - case sig := <-s.sigChan: - switch sig { - case syscall.SIGHUP: - if s.onReload != nil { - if err := s.onReload(); err != nil { - LogError(fmt.Sprintf("reload failed: %v", err)) - } else { - LogInfo("configuration reloaded") - } - } - case syscall.SIGINT, syscall.SIGTERM: - s.cancel() - return - } - case <-ctx.Done(): - return - } - } - }() - - return nil -} - -func (s *signalService) OnShutdown(ctx context.Context) error { - signal.Stop(s.sigChan) - close(s.sigChan) - return nil -} diff --git a/pkg/cli/strings.go b/pkg/cli/strings.go deleted file mode 100644 index 9e4240b..0000000 --- a/pkg/cli/strings.go +++ /dev/null @@ -1,48 +0,0 @@ -package cli - -import "fmt" - -// Sprintf formats a string (fmt.Sprintf wrapper). -func Sprintf(format string, args ...any) string { - return fmt.Sprintf(format, args...) -} - -// Sprint formats using default formats (fmt.Sprint wrapper). -func Sprint(args ...any) string { - return fmt.Sprint(args...) -} - -// Styled returns text with a style applied. -func Styled(style *AnsiStyle, text string) string { - return style.Render(text) -} - -// Styledf returns formatted text with a style applied. 
-func Styledf(style *AnsiStyle, format string, args ...any) string { - return style.Render(fmt.Sprintf(format, args...)) -} - -// SuccessStr returns success-styled string. -func SuccessStr(msg string) string { - return SuccessStyle.Render(Glyph(":check:") + " " + msg) -} - -// ErrorStr returns error-styled string. -func ErrorStr(msg string) string { - return ErrorStyle.Render(Glyph(":cross:") + " " + msg) -} - -// WarnStr returns warning-styled string. -func WarnStr(msg string) string { - return WarningStyle.Render(Glyph(":warn:") + " " + msg) -} - -// InfoStr returns info-styled string. -func InfoStr(msg string) string { - return InfoStyle.Render(Glyph(":info:") + " " + msg) -} - -// DimStr returns dim-styled string. -func DimStr(msg string) string { - return DimStyle.Render(msg) -} \ No newline at end of file diff --git a/pkg/cli/styles.go b/pkg/cli/styles.go deleted file mode 100644 index 985d3de..0000000 --- a/pkg/cli/styles.go +++ /dev/null @@ -1,209 +0,0 @@ -// Package cli provides semantic CLI output with zero external dependencies. 
-package cli - -import ( - "fmt" - "strings" - "time" -) - -// Tailwind colour palette (hex strings) -const ( - ColourBlue50 = "#eff6ff" - ColourBlue100 = "#dbeafe" - ColourBlue200 = "#bfdbfe" - ColourBlue300 = "#93c5fd" - ColourBlue400 = "#60a5fa" - ColourBlue500 = "#3b82f6" - ColourBlue600 = "#2563eb" - ColourBlue700 = "#1d4ed8" - ColourGreen400 = "#4ade80" - ColourGreen500 = "#22c55e" - ColourGreen600 = "#16a34a" - ColourRed400 = "#f87171" - ColourRed500 = "#ef4444" - ColourRed600 = "#dc2626" - ColourAmber400 = "#fbbf24" - ColourAmber500 = "#f59e0b" - ColourAmber600 = "#d97706" - ColourOrange500 = "#f97316" - ColourYellow500 = "#eab308" - ColourEmerald500= "#10b981" - ColourPurple500 = "#a855f7" - ColourViolet400 = "#a78bfa" - ColourViolet500 = "#8b5cf6" - ColourIndigo500 = "#6366f1" - ColourCyan500 = "#06b6d4" - ColourGray50 = "#f9fafb" - ColourGray100 = "#f3f4f6" - ColourGray200 = "#e5e7eb" - ColourGray300 = "#d1d5db" - ColourGray400 = "#9ca3af" - ColourGray500 = "#6b7280" - ColourGray600 = "#4b5563" - ColourGray700 = "#374151" - ColourGray800 = "#1f2937" - ColourGray900 = "#111827" -) - -// Core styles -var ( - SuccessStyle = NewStyle().Bold().Foreground(ColourGreen500) - ErrorStyle = NewStyle().Bold().Foreground(ColourRed500) - WarningStyle = NewStyle().Bold().Foreground(ColourAmber500) - InfoStyle = NewStyle().Foreground(ColourBlue400) - DimStyle = NewStyle().Dim().Foreground(ColourGray500) - MutedStyle = NewStyle().Foreground(ColourGray600) - BoldStyle = NewStyle().Bold() - KeyStyle = NewStyle().Foreground(ColourGray400) - ValueStyle = NewStyle().Foreground(ColourGray200) - AccentStyle = NewStyle().Foreground(ColourCyan500) - LinkStyle = NewStyle().Foreground(ColourBlue500).Underline() - HeaderStyle = NewStyle().Bold().Foreground(ColourGray200) - TitleStyle = NewStyle().Bold().Foreground(ColourBlue500) - CodeStyle = NewStyle().Foreground(ColourGray300) - NumberStyle = NewStyle().Foreground(ColourBlue300) - RepoStyle = 
NewStyle().Bold().Foreground(ColourBlue500) -) - -// Truncate shortens a string to max length with ellipsis. -func Truncate(s string, max int) string { - if len(s) <= max { - return s - } - if max <= 3 { - return s[:max] - } - return s[:max-3] + "..." -} - -// Pad right-pads a string to width. -func Pad(s string, width int) string { - if len(s) >= width { - return s - } - return s + strings.Repeat(" ", width-len(s)) -} - -// FormatAge formats a time as human-readable age (e.g., "2h ago", "3d ago"). -func FormatAge(t time.Time) string { - d := time.Since(t) - switch { - case d < time.Minute: - return "just now" - case d < time.Hour: - return fmt.Sprintf("%dm ago", int(d.Minutes())) - case d < 24*time.Hour: - return fmt.Sprintf("%dh ago", int(d.Hours())) - case d < 7*24*time.Hour: - return fmt.Sprintf("%dd ago", int(d.Hours()/24)) - case d < 30*24*time.Hour: - return fmt.Sprintf("%dw ago", int(d.Hours()/(24*7))) - default: - return fmt.Sprintf("%dmo ago", int(d.Hours()/(24*30))) - } -} - -// Table renders tabular data with aligned columns. -// HLCRF is for layout; Table is for tabular data - they serve different purposes. -type Table struct { - Headers []string - Rows [][]string - Style TableStyle -} - -type TableStyle struct { - HeaderStyle *AnsiStyle - CellStyle *AnsiStyle - Separator string -} - -// DefaultTableStyle returns sensible defaults. -func DefaultTableStyle() TableStyle { - return TableStyle{ - HeaderStyle: HeaderStyle, - CellStyle: nil, - Separator: " ", - } -} - -// NewTable creates a table with headers. -func NewTable(headers ...string) *Table { - return &Table{ - Headers: headers, - Style: DefaultTableStyle(), - } -} - -// AddRow adds a row to the table. -func (t *Table) AddRow(cells ...string) *Table { - t.Rows = append(t.Rows, cells) - return t -} - -// String renders the table. 
-func (t *Table) String() string { - if len(t.Headers) == 0 && len(t.Rows) == 0 { - return "" - } - - // Calculate column widths - cols := len(t.Headers) - if cols == 0 && len(t.Rows) > 0 { - cols = len(t.Rows[0]) - } - widths := make([]int, cols) - - for i, h := range t.Headers { - if len(h) > widths[i] { - widths[i] = len(h) - } - } - for _, row := range t.Rows { - for i, cell := range row { - if i < cols && len(cell) > widths[i] { - widths[i] = len(cell) - } - } - } - - var sb strings.Builder - sep := t.Style.Separator - - // Headers - if len(t.Headers) > 0 { - for i, h := range t.Headers { - if i > 0 { - sb.WriteString(sep) - } - styled := Pad(h, widths[i]) - if t.Style.HeaderStyle != nil { - styled = t.Style.HeaderStyle.Render(styled) - } - sb.WriteString(styled) - } - sb.WriteString("\n") - } - - // Rows - for _, row := range t.Rows { - for i, cell := range row { - if i > 0 { - sb.WriteString(sep) - } - styled := Pad(cell, widths[i]) - if t.Style.CellStyle != nil { - styled = t.Style.CellStyle.Render(styled) - } - sb.WriteString(styled) - } - sb.WriteString("\n") - } - - return sb.String() -} - -// Render prints the table to stdout. -func (t *Table) Render() { - fmt.Print(t.String()) -} \ No newline at end of file diff --git a/pkg/cli/utils.go b/pkg/cli/utils.go deleted file mode 100644 index e7294aa..0000000 --- a/pkg/cli/utils.go +++ /dev/null @@ -1,500 +0,0 @@ -package cli - -import ( - "bufio" - "context" - "fmt" - "os" - "os/exec" - "strings" - "time" - - "github.com/host-uk/core/pkg/i18n" -) - -// GhAuthenticated checks if the GitHub CLI is authenticated. -// Returns true if 'gh auth status' indicates a logged-in user. -func GhAuthenticated() bool { - cmd := exec.Command("gh", "auth", "status") - output, _ := cmd.CombinedOutput() - return strings.Contains(string(output), "Logged in") -} - - - -// ConfirmOption configures Confirm behaviour. 
-type ConfirmOption func(*confirmConfig) - -type confirmConfig struct { - defaultYes bool - required bool - timeout time.Duration -} - -// DefaultYes sets the default response to "yes" (pressing Enter confirms). -func DefaultYes() ConfirmOption { - return func(c *confirmConfig) { - c.defaultYes = true - } -} - -// Required prevents empty responses; user must explicitly type y/n. -func Required() ConfirmOption { - return func(c *confirmConfig) { - c.required = true - } -} - -// Timeout sets a timeout after which the default response is auto-selected. -// If no default is set (not Required and not DefaultYes), defaults to "no". -// -// Confirm("Continue?", Timeout(30*time.Second)) // Auto-no after 30s -// Confirm("Continue?", DefaultYes(), Timeout(10*time.Second)) // Auto-yes after 10s -func Timeout(d time.Duration) ConfirmOption { - return func(c *confirmConfig) { - c.timeout = d - } -} - -// Confirm prompts the user for yes/no confirmation. -// Returns true if the user enters "y" or "yes" (case-insensitive). -// -// Basic usage: -// -// if Confirm("Delete file?") { ... } -// -// With options: -// -// if Confirm("Save changes?", DefaultYes()) { ... } -// if Confirm("Dangerous!", Required()) { ... } -// if Confirm("Auto-continue?", Timeout(30*time.Second)) { ... 
} -func Confirm(prompt string, opts ...ConfirmOption) bool { - cfg := &confirmConfig{} - for _, opt := range opts { - opt(cfg) - } - - // Build the prompt suffix - var suffix string - if cfg.required { - suffix = "[y/n] " - } else if cfg.defaultYes { - suffix = "[Y/n] " - } else { - suffix = "[y/N] " - } - - // Add timeout indicator if set - if cfg.timeout > 0 { - suffix = fmt.Sprintf("%s(auto in %s) ", suffix, cfg.timeout.Round(time.Second)) - } - - reader := bufio.NewReader(os.Stdin) - - for { - fmt.Printf("%s %s", prompt, suffix) - - var response string - - if cfg.timeout > 0 { - // Use timeout-based reading - resultChan := make(chan string, 1) - go func() { - line, _ := reader.ReadString('\n') - resultChan <- line - }() - - select { - case response = <-resultChan: - response = strings.ToLower(strings.TrimSpace(response)) - case <-time.After(cfg.timeout): - fmt.Println() // New line after timeout - return cfg.defaultYes - } - } else { - response, _ = reader.ReadString('\n') - response = strings.ToLower(strings.TrimSpace(response)) - } - - // Handle empty response - if response == "" { - if cfg.required { - continue // Ask again - } - return cfg.defaultYes - } - - // Check for yes/no responses - if response == "y" || response == "yes" { - return true - } - if response == "n" || response == "no" { - return false - } - - // Invalid response - if cfg.required { - fmt.Println("Please enter 'y' or 'n'") - continue - } - - // Non-required: treat invalid as default - return cfg.defaultYes - } -} - -// ConfirmAction prompts for confirmation of an action using grammar composition. -// -// if ConfirmAction("delete", "config.yaml") { ... } -// if ConfirmAction("save", "changes", DefaultYes()) { ... } -func ConfirmAction(verb, subject string, opts ...ConfirmOption) bool { - question := i18n.Title(verb) + " " + subject + "?" - return Confirm(question, opts...) -} - -// ConfirmDangerousAction prompts for double confirmation of a dangerous action. 
-// Shows initial question, then a "Really verb subject?" confirmation. -// -// if ConfirmDangerousAction("delete", "config.yaml") { ... } -func ConfirmDangerousAction(verb, subject string) bool { - question := i18n.Title(verb) + " " + subject + "?" - if !Confirm(question, Required()) { - return false - } - - confirm := "Really " + verb + " " + subject + "?" - return Confirm(confirm, Required()) -} - -// QuestionOption configures Question behaviour. -type QuestionOption func(*questionConfig) - -type questionConfig struct { - defaultValue string - required bool - validator func(string) error -} - -// WithDefault sets the default value shown in brackets. -func WithDefault(value string) QuestionOption { - return func(c *questionConfig) { - c.defaultValue = value - } -} - -// WithValidator adds a validation function for the response. -func WithValidator(fn func(string) error) QuestionOption { - return func(c *questionConfig) { - c.validator = fn - } -} - -// RequiredInput prevents empty responses. -func RequiredInput() QuestionOption { - return func(c *questionConfig) { - c.required = true - } -} - -// Question prompts the user for text input. 
-// -// name := Question("Enter your name:") -// name := Question("Enter your name:", WithDefault("Anonymous")) -// name := Question("Enter your name:", RequiredInput()) -func Question(prompt string, opts ...QuestionOption) string { - cfg := &questionConfig{} - for _, opt := range opts { - opt(cfg) - } - - reader := bufio.NewReader(os.Stdin) - - for { - // Build prompt with default - if cfg.defaultValue != "" { - fmt.Printf("%s [%s] ", prompt, cfg.defaultValue) - } else { - fmt.Printf("%s ", prompt) - } - - response, _ := reader.ReadString('\n') - response = strings.TrimSpace(response) - - // Handle empty response - if response == "" { - if cfg.required { - fmt.Println("Response required") - continue - } - response = cfg.defaultValue - } - - // Validate if validator provided - if cfg.validator != nil { - if err := cfg.validator(response); err != nil { - fmt.Printf("Invalid: %v\n", err) - continue - } - } - - return response - } -} - -// QuestionAction prompts for text input using grammar composition. -// -// name := QuestionAction("rename", "old.txt") -func QuestionAction(verb, subject string, opts ...QuestionOption) string { - question := i18n.Title(verb) + " " + subject + "?" - return Question(question, opts...) -} - -// ChooseOption configures Choose behaviour. -type ChooseOption[T any] func(*chooseConfig[T]) - -type chooseConfig[T any] struct { - displayFn func(T) string - defaultN int // 0-based index of default selection - filter bool // Enable fuzzy filtering - multi bool // Allow multiple selection -} - -// WithDisplay sets a custom display function for items. -func WithDisplay[T any](fn func(T) string) ChooseOption[T] { - return func(c *chooseConfig[T]) { - c.displayFn = fn - } -} - -// WithDefaultIndex sets the default selection index (0-based). -func WithDefaultIndex[T any](idx int) ChooseOption[T] { - return func(c *chooseConfig[T]) { - c.defaultN = idx - } -} - -// Filter enables type-to-filter functionality. 
-// Users can type to narrow down the list of options. -// Note: This is a hint for interactive UIs; the basic CLI Choose -// implementation uses numbered selection which doesn't support filtering. -func Filter[T any]() ChooseOption[T] { - return func(c *chooseConfig[T]) { - c.filter = true - } -} - -// Multi allows multiple selections. -// Use ChooseMulti instead of Choose when this option is needed. -func Multi[T any]() ChooseOption[T] { - return func(c *chooseConfig[T]) { - c.multi = true - } -} - -// Display sets a custom display function for items. -// Alias for WithDisplay for shorter syntax. -// -// Choose("Select:", items, Display(func(f File) string { return f.Name })) -func Display[T any](fn func(T) string) ChooseOption[T] { - return WithDisplay[T](fn) -} - -// Choose prompts the user to select from a list of items. -// Returns the selected item. Uses simple numbered selection for terminal compatibility. -// -// choice := Choose("Select a file:", files) -// choice := Choose("Select a file:", files, WithDisplay(func(f File) string { return f.Name })) -func Choose[T any](prompt string, items []T, opts ...ChooseOption[T]) T { - var zero T - if len(items) == 0 { - return zero - } - - cfg := &chooseConfig[T]{ - displayFn: func(item T) string { return fmt.Sprint(item) }, - } - for _, opt := range opts { - opt(cfg) - } - - // Display options - fmt.Println(prompt) - for i, item := range items { - marker := " " - if i == cfg.defaultN { - marker = "*" - } - fmt.Printf(" %s%d. 
%s\n", marker, i+1, cfg.displayFn(item)) - } - - reader := bufio.NewReader(os.Stdin) - - for { - fmt.Printf("Enter number [1-%d]: ", len(items)) - response, _ := reader.ReadString('\n') - response = strings.TrimSpace(response) - - // Empty response uses default - if response == "" { - return items[cfg.defaultN] - } - - // Parse number - var n int - if _, err := fmt.Sscanf(response, "%d", &n); err == nil { - if n >= 1 && n <= len(items) { - return items[n-1] - } - } - - fmt.Printf("Please enter a number between 1 and %d\n", len(items)) - } -} - -// ChooseAction prompts for selection using grammar composition. -// -// file := ChooseAction("select", "file", files) -func ChooseAction[T any](verb, subject string, items []T, opts ...ChooseOption[T]) T { - question := i18n.Title(verb) + " " + subject + ":" - return Choose(question, items, opts...) -} - -// ChooseMulti prompts the user to select multiple items from a list. -// Returns the selected items. Uses space-separated numbers or ranges. -// -// choices := ChooseMulti("Select files:", files) -// choices := ChooseMulti("Select files:", files, WithDisplay(func(f File) string { return f.Name })) -// -// Input format: -// - "1 3 5" - select items 1, 3, and 5 -// - "1-3" - select items 1, 2, and 3 -// - "1 3-5" - select items 1, 3, 4, and 5 -// - "" (empty) - select none -func ChooseMulti[T any](prompt string, items []T, opts ...ChooseOption[T]) []T { - if len(items) == 0 { - return nil - } - - cfg := &chooseConfig[T]{ - displayFn: func(item T) string { return fmt.Sprint(item) }, - } - for _, opt := range opts { - opt(cfg) - } - - // Display options - fmt.Println(prompt) - for i, item := range items { - fmt.Printf(" %d. 
%s\n", i+1, cfg.displayFn(item)) - } - - reader := bufio.NewReader(os.Stdin) - - for { - fmt.Printf("Enter numbers (e.g., 1 3 5 or 1-3) or empty for none: ") - response, _ := reader.ReadString('\n') - response = strings.TrimSpace(response) - - // Empty response returns no selections - if response == "" { - return nil - } - - // Parse the selection - selected, err := parseMultiSelection(response, len(items)) - if err != nil { - fmt.Printf("Invalid selection: %v\n", err) - continue - } - - // Build result - result := make([]T, 0, len(selected)) - for _, idx := range selected { - result = append(result, items[idx]) - } - return result - } -} - -// parseMultiSelection parses a multi-selection string like "1 3 5" or "1-3 5". -// Returns 0-based indices. -func parseMultiSelection(input string, maxItems int) ([]int, error) { - selected := make(map[int]bool) - parts := strings.Fields(input) - - for _, part := range parts { - // Check for range (e.g., "1-3") - if strings.Contains(part, "-") { - rangeParts := strings.Split(part, "-") - if len(rangeParts) != 2 { - return nil, fmt.Errorf("invalid range: %s", part) - } - var start, end int - if _, err := fmt.Sscanf(rangeParts[0], "%d", &start); err != nil { - return nil, fmt.Errorf("invalid range start: %s", rangeParts[0]) - } - if _, err := fmt.Sscanf(rangeParts[1], "%d", &end); err != nil { - return nil, fmt.Errorf("invalid range end: %s", rangeParts[1]) - } - if start < 1 || start > maxItems || end < 1 || end > maxItems || start > end { - return nil, fmt.Errorf("range out of bounds: %s", part) - } - for i := start; i <= end; i++ { - selected[i-1] = true // Convert to 0-based - } - } else { - // Single number - var n int - if _, err := fmt.Sscanf(part, "%d", &n); err != nil { - return nil, fmt.Errorf("invalid number: %s", part) - } - if n < 1 || n > maxItems { - return nil, fmt.Errorf("number out of range: %d", n) - } - selected[n-1] = true // Convert to 0-based - } - } - - // Convert map to sorted slice - result := 
make([]int, 0, len(selected)) - for i := 0; i < maxItems; i++ { - if selected[i] { - result = append(result, i) - } - } - return result, nil -} - -// ChooseMultiAction prompts for multiple selections using grammar composition. -// -// files := ChooseMultiAction("select", "files", files) -func ChooseMultiAction[T any](verb, subject string, items []T, opts ...ChooseOption[T]) []T { - question := i18n.Title(verb) + " " + subject + ":" - return ChooseMulti(question, items, opts...) -} - - - -// GitClone clones a GitHub repository to the specified path. -// Prefers 'gh repo clone' if authenticated, falls back to SSH. -func GitClone(ctx context.Context, org, repo, path string) error { - if GhAuthenticated() { - httpsURL := fmt.Sprintf("https://github.com/%s/%s.git", org, repo) - cmd := exec.CommandContext(ctx, "gh", "repo", "clone", httpsURL, path) - output, err := cmd.CombinedOutput() - if err == nil { - return nil - } - errStr := strings.TrimSpace(string(output)) - if strings.Contains(errStr, "already exists") { - return fmt.Errorf("%s", errStr) - } - } - // Fall back to SSH clone - cmd := exec.CommandContext(ctx, "git", "clone", fmt.Sprintf("git@github.com:%s/%s.git", org, repo), path) - output, err := cmd.CombinedOutput() - if err != nil { - return fmt.Errorf("%s", strings.TrimSpace(string(output))) - } - return nil -} diff --git a/pkg/container/container.go b/pkg/container/container.go deleted file mode 100644 index d7161c3..0000000 --- a/pkg/container/container.go +++ /dev/null @@ -1,106 +0,0 @@ -// Package container provides a runtime for managing LinuxKit containers. -// It supports running LinuxKit images (ISO, qcow2, vmdk, raw) using -// available hypervisors (QEMU on Linux, Hyperkit on macOS). -package container - -import ( - "context" - "crypto/rand" - "encoding/hex" - "io" - "time" -) - -// Container represents a running LinuxKit container/VM instance. -type Container struct { - // ID is a unique identifier for the container (8 character hex string). 
- ID string `json:"id"` - // Name is the optional human-readable name for the container. - Name string `json:"name,omitempty"` - // Image is the path to the LinuxKit image being run. - Image string `json:"image"` - // Status represents the current state of the container. - Status Status `json:"status"` - // PID is the process ID of the hypervisor running this container. - PID int `json:"pid"` - // StartedAt is when the container was started. - StartedAt time.Time `json:"started_at"` - // Ports maps host ports to container ports. - Ports map[int]int `json:"ports,omitempty"` - // Memory is the amount of memory allocated in MB. - Memory int `json:"memory,omitempty"` - // CPUs is the number of CPUs allocated. - CPUs int `json:"cpus,omitempty"` -} - -// Status represents the state of a container. -type Status string - -const ( - // StatusRunning indicates the container is running. - StatusRunning Status = "running" - // StatusStopped indicates the container has stopped. - StatusStopped Status = "stopped" - // StatusError indicates the container encountered an error. - StatusError Status = "error" -) - -// RunOptions configures how a container should be run. -type RunOptions struct { - // Name is an optional human-readable name for the container. - Name string - // Detach runs the container in the background. - Detach bool - // Memory is the amount of memory to allocate in MB (default: 1024). - Memory int - // CPUs is the number of CPUs to allocate (default: 1). - CPUs int - // Ports maps host ports to container ports. - Ports map[int]int - // Volumes maps host paths to container paths. - Volumes map[string]string - // SSHPort is the port to use for SSH access (default: 2222). - SSHPort int - // SSHKey is the path to the SSH private key for exec commands. - SSHKey string -} - -// Manager defines the interface for container lifecycle management. -type Manager interface { - // Run starts a new container from the given image. 
- Run(ctx context.Context, image string, opts RunOptions) (*Container, error) - // Stop stops a running container by ID. - Stop(ctx context.Context, id string) error - // List returns all known containers. - List(ctx context.Context) ([]*Container, error) - // Logs returns a reader for the container's log output. - // If follow is true, the reader will continue to stream new log entries. - Logs(ctx context.Context, id string, follow bool) (io.ReadCloser, error) - // Exec executes a command inside the container via SSH. - Exec(ctx context.Context, id string, cmd []string) error -} - -// GenerateID creates a new unique container ID (8 hex characters). -func GenerateID() (string, error) { - bytes := make([]byte, 4) - if _, err := rand.Read(bytes); err != nil { - return "", err - } - return hex.EncodeToString(bytes), nil -} - -// ImageFormat represents the format of a LinuxKit image. -type ImageFormat string - -const ( - // FormatISO is an ISO image format. - FormatISO ImageFormat = "iso" - // FormatQCOW2 is a QEMU Copy-On-Write image format. - FormatQCOW2 ImageFormat = "qcow2" - // FormatVMDK is a VMware disk image format. - FormatVMDK ImageFormat = "vmdk" - // FormatRaw is a raw disk image format. - FormatRaw ImageFormat = "raw" - // FormatUnknown indicates an unknown image format. - FormatUnknown ImageFormat = "unknown" -) diff --git a/pkg/container/hypervisor.go b/pkg/container/hypervisor.go deleted file mode 100644 index b5c1e5f..0000000 --- a/pkg/container/hypervisor.go +++ /dev/null @@ -1,273 +0,0 @@ -package container - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" - "runtime" - "strings" -) - -// Hypervisor defines the interface for VM hypervisors. -type Hypervisor interface { - // Name returns the name of the hypervisor. - Name() string - // Available checks if the hypervisor is available on the system. - Available() bool - // BuildCommand builds the command to run a VM with the given options. 
- BuildCommand(ctx context.Context, image string, opts *HypervisorOptions) (*exec.Cmd, error) -} - -// HypervisorOptions contains options for running a VM. -type HypervisorOptions struct { - // Memory in MB. - Memory int - // CPUs count. - CPUs int - // LogFile path for output. - LogFile string - // SSHPort for SSH access. - SSHPort int - // Ports maps host ports to guest ports. - Ports map[int]int - // Volumes maps host paths to guest paths (9p shares). - Volumes map[string]string - // Detach runs in background (nographic mode). - Detach bool -} - -// QemuHypervisor implements Hypervisor for QEMU. -type QemuHypervisor struct { - // Binary is the path to the qemu binary (defaults to qemu-system-x86_64). - Binary string -} - -// NewQemuHypervisor creates a new QEMU hypervisor instance. -func NewQemuHypervisor() *QemuHypervisor { - return &QemuHypervisor{ - Binary: "qemu-system-x86_64", - } -} - -// Name returns the hypervisor name. -func (q *QemuHypervisor) Name() string { - return "qemu" -} - -// Available checks if QEMU is installed and accessible. -func (q *QemuHypervisor) Available() bool { - _, err := exec.LookPath(q.Binary) - return err == nil -} - -// BuildCommand creates the QEMU command for running a VM. 
-func (q *QemuHypervisor) BuildCommand(ctx context.Context, image string, opts *HypervisorOptions) (*exec.Cmd, error) { - format := DetectImageFormat(image) - if format == FormatUnknown { - return nil, fmt.Errorf("unknown image format: %s", image) - } - - args := []string{ - "-m", fmt.Sprintf("%d", opts.Memory), - "-smp", fmt.Sprintf("%d", opts.CPUs), - "-enable-kvm", - } - - // Add the image based on format - switch format { - case FormatISO: - args = append(args, "-cdrom", image) - args = append(args, "-boot", "d") - case FormatQCOW2: - args = append(args, "-drive", fmt.Sprintf("file=%s,format=qcow2", image)) - case FormatVMDK: - args = append(args, "-drive", fmt.Sprintf("file=%s,format=vmdk", image)) - case FormatRaw: - args = append(args, "-drive", fmt.Sprintf("file=%s,format=raw", image)) - } - - // Always run in nographic mode for container-like behavior - args = append(args, "-nographic") - - // Add serial console for log output - args = append(args, "-serial", "stdio") - - // Network with port forwarding - netdev := "user,id=net0" - if opts.SSHPort > 0 { - netdev += fmt.Sprintf(",hostfwd=tcp::%d-:22", opts.SSHPort) - } - for hostPort, guestPort := range opts.Ports { - netdev += fmt.Sprintf(",hostfwd=tcp::%d-:%d", hostPort, guestPort) - } - args = append(args, "-netdev", netdev) - args = append(args, "-device", "virtio-net-pci,netdev=net0") - - // Add 9p shares for volumes - shareID := 0 - for hostPath, guestPath := range opts.Volumes { - tag := fmt.Sprintf("share%d", shareID) - args = append(args, - "-fsdev", fmt.Sprintf("local,id=%s,path=%s,security_model=none", tag, hostPath), - "-device", fmt.Sprintf("virtio-9p-pci,fsdev=%s,mount_tag=%s", tag, filepath.Base(guestPath)), - ) - shareID++ - } - - // Check if KVM is available on Linux, remove -enable-kvm if not - if runtime.GOOS != "linux" || !isKVMAvailable() { - // Remove -enable-kvm from args - newArgs := make([]string, 0, len(args)) - for _, arg := range args { - if arg != "-enable-kvm" { - newArgs = 
append(newArgs, arg) - } - } - args = newArgs - - // On macOS, use HVF acceleration if available - if runtime.GOOS == "darwin" { - args = append(args, "-accel", "hvf") - } - } - - cmd := exec.CommandContext(ctx, q.Binary, args...) - return cmd, nil -} - -// isKVMAvailable checks if KVM is available on the system. -func isKVMAvailable() bool { - _, err := os.Stat("/dev/kvm") - return err == nil -} - -// HyperkitHypervisor implements Hypervisor for macOS Hyperkit. -type HyperkitHypervisor struct { - // Binary is the path to the hyperkit binary. - Binary string -} - -// NewHyperkitHypervisor creates a new Hyperkit hypervisor instance. -func NewHyperkitHypervisor() *HyperkitHypervisor { - return &HyperkitHypervisor{ - Binary: "hyperkit", - } -} - -// Name returns the hypervisor name. -func (h *HyperkitHypervisor) Name() string { - return "hyperkit" -} - -// Available checks if Hyperkit is installed and accessible. -func (h *HyperkitHypervisor) Available() bool { - if runtime.GOOS != "darwin" { - return false - } - _, err := exec.LookPath(h.Binary) - return err == nil -} - -// BuildCommand creates the Hyperkit command for running a VM. 
-func (h *HyperkitHypervisor) BuildCommand(ctx context.Context, image string, opts *HypervisorOptions) (*exec.Cmd, error) { - format := DetectImageFormat(image) - if format == FormatUnknown { - return nil, fmt.Errorf("unknown image format: %s", image) - } - - args := []string{ - "-m", fmt.Sprintf("%dM", opts.Memory), - "-c", fmt.Sprintf("%d", opts.CPUs), - "-A", // ACPI - "-u", // Unlimited console output - "-s", "0:0,hostbridge", - "-s", "31,lpc", - "-l", "com1,stdio", // Serial console - } - - // Add PCI slot for disk (slot 2) - switch format { - case FormatISO: - args = append(args, "-s", fmt.Sprintf("2:0,ahci-cd,%s", image)) - case FormatQCOW2, FormatVMDK, FormatRaw: - args = append(args, "-s", fmt.Sprintf("2:0,virtio-blk,%s", image)) - } - - // Network with port forwarding (slot 3) - netArgs := "virtio-net" - if opts.SSHPort > 0 || len(opts.Ports) > 0 { - // Hyperkit uses slirp for user networking with port forwarding - portForwards := make([]string, 0) - if opts.SSHPort > 0 { - portForwards = append(portForwards, fmt.Sprintf("tcp:%d:22", opts.SSHPort)) - } - for hostPort, guestPort := range opts.Ports { - portForwards = append(portForwards, fmt.Sprintf("tcp:%d:%d", hostPort, guestPort)) - } - if len(portForwards) > 0 { - netArgs += "," + strings.Join(portForwards, ",") - } - } - args = append(args, "-s", "3:0,"+netArgs) - - cmd := exec.CommandContext(ctx, h.Binary, args...) - return cmd, nil -} - -// DetectImageFormat determines the image format from its file extension. -func DetectImageFormat(path string) ImageFormat { - ext := strings.ToLower(filepath.Ext(path)) - switch ext { - case ".iso": - return FormatISO - case ".qcow2": - return FormatQCOW2 - case ".vmdk": - return FormatVMDK - case ".raw", ".img": - return FormatRaw - default: - return FormatUnknown - } -} - -// DetectHypervisor returns the best available hypervisor for the current platform. 
-func DetectHypervisor() (Hypervisor, error) { - // On macOS, prefer Hyperkit if available, fall back to QEMU - if runtime.GOOS == "darwin" { - hk := NewHyperkitHypervisor() - if hk.Available() { - return hk, nil - } - } - - // Try QEMU on all platforms - qemu := NewQemuHypervisor() - if qemu.Available() { - return qemu, nil - } - - return nil, fmt.Errorf("no hypervisor available: install qemu or hyperkit (macOS)") -} - -// GetHypervisor returns a specific hypervisor by name. -func GetHypervisor(name string) (Hypervisor, error) { - switch strings.ToLower(name) { - case "qemu": - h := NewQemuHypervisor() - if !h.Available() { - return nil, fmt.Errorf("qemu is not available") - } - return h, nil - case "hyperkit": - h := NewHyperkitHypervisor() - if !h.Available() { - return nil, fmt.Errorf("hyperkit is not available (requires macOS)") - } - return h, nil - default: - return nil, fmt.Errorf("unknown hypervisor: %s", name) - } -} diff --git a/pkg/container/hypervisor_test.go b/pkg/container/hypervisor_test.go deleted file mode 100644 index e5c9964..0000000 --- a/pkg/container/hypervisor_test.go +++ /dev/null @@ -1,358 +0,0 @@ -package container - -import ( - "context" - "runtime" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestQemuHypervisor_Available_Good(t *testing.T) { - q := NewQemuHypervisor() - - // Check if qemu is available on this system - available := q.Available() - - // We just verify it returns a boolean without error - // The actual availability depends on the system - assert.IsType(t, true, available) -} - -func TestQemuHypervisor_Available_Bad_InvalidBinary(t *testing.T) { - q := &QemuHypervisor{ - Binary: "nonexistent-qemu-binary-that-does-not-exist", - } - - available := q.Available() - - assert.False(t, available) -} - -func TestHyperkitHypervisor_Available_Good(t *testing.T) { - h := NewHyperkitHypervisor() - - available := h.Available() - - // On non-darwin systems, should always be false 
- if runtime.GOOS != "darwin" { - assert.False(t, available) - } else { - // On darwin, just verify it returns a boolean - assert.IsType(t, true, available) - } -} - -func TestHyperkitHypervisor_Available_Bad_NotDarwin(t *testing.T) { - if runtime.GOOS == "darwin" { - t.Skip("This test only runs on non-darwin systems") - } - - h := NewHyperkitHypervisor() - - available := h.Available() - - assert.False(t, available, "Hyperkit should not be available on non-darwin systems") -} - -func TestHyperkitHypervisor_Available_Bad_InvalidBinary(t *testing.T) { - h := &HyperkitHypervisor{ - Binary: "nonexistent-hyperkit-binary-that-does-not-exist", - } - - available := h.Available() - - assert.False(t, available) -} - -func TestIsKVMAvailable_Good(t *testing.T) { - // This test verifies the function runs without error - // The actual result depends on the system - result := isKVMAvailable() - - // On non-linux systems, should be false - if runtime.GOOS != "linux" { - assert.False(t, result, "KVM should not be available on non-linux systems") - } else { - // On linux, just verify it returns a boolean - assert.IsType(t, true, result) - } -} - -func TestDetectHypervisor_Good(t *testing.T) { - // DetectHypervisor tries to find an available hypervisor - hv, err := DetectHypervisor() - - // This test may pass or fail depending on system configuration - // If no hypervisor is available, it should return an error - if err != nil { - assert.Nil(t, hv) - assert.Contains(t, err.Error(), "no hypervisor available") - } else { - assert.NotNil(t, hv) - assert.NotEmpty(t, hv.Name()) - } -} - -func TestGetHypervisor_Good_Qemu(t *testing.T) { - hv, err := GetHypervisor("qemu") - - // Depends on whether qemu is installed - if err != nil { - assert.Contains(t, err.Error(), "not available") - } else { - assert.NotNil(t, hv) - assert.Equal(t, "qemu", hv.Name()) - } -} - -func TestGetHypervisor_Good_QemuUppercase(t *testing.T) { - hv, err := GetHypervisor("QEMU") - - // Depends on whether qemu is 
installed - if err != nil { - assert.Contains(t, err.Error(), "not available") - } else { - assert.NotNil(t, hv) - assert.Equal(t, "qemu", hv.Name()) - } -} - -func TestGetHypervisor_Good_Hyperkit(t *testing.T) { - hv, err := GetHypervisor("hyperkit") - - // On non-darwin systems, should always fail - if runtime.GOOS != "darwin" { - assert.Error(t, err) - assert.Contains(t, err.Error(), "not available") - } else { - // On darwin, depends on whether hyperkit is installed - if err != nil { - assert.Contains(t, err.Error(), "not available") - } else { - assert.NotNil(t, hv) - assert.Equal(t, "hyperkit", hv.Name()) - } - } -} - -func TestGetHypervisor_Bad_Unknown(t *testing.T) { - _, err := GetHypervisor("unknown-hypervisor") - - assert.Error(t, err) - assert.Contains(t, err.Error(), "unknown hypervisor") -} - -func TestQemuHypervisor_BuildCommand_Good_WithPortsAndVolumes(t *testing.T) { - q := NewQemuHypervisor() - - ctx := context.Background() - opts := &HypervisorOptions{ - Memory: 2048, - CPUs: 4, - SSHPort: 2222, - Ports: map[int]int{8080: 80, 443: 443}, - Volumes: map[string]string{ - "/host/data": "/container/data", - "/host/logs": "/container/logs", - }, - Detach: true, - } - - cmd, err := q.BuildCommand(ctx, "/path/to/image.iso", opts) - require.NoError(t, err) - assert.NotNil(t, cmd) - - // Verify command includes all expected args - args := cmd.Args - assert.Contains(t, args, "-m") - assert.Contains(t, args, "2048") - assert.Contains(t, args, "-smp") - assert.Contains(t, args, "4") -} - -func TestQemuHypervisor_BuildCommand_Good_QCow2Format(t *testing.T) { - q := NewQemuHypervisor() - - ctx := context.Background() - opts := &HypervisorOptions{Memory: 1024, CPUs: 1} - - cmd, err := q.BuildCommand(ctx, "/path/to/image.qcow2", opts) - require.NoError(t, err) - - // Check that the drive format is qcow2 - found := false - for _, arg := range cmd.Args { - if arg == "file=/path/to/image.qcow2,format=qcow2" { - found = true - break - } - } - assert.True(t, found, 
"Should have qcow2 drive argument") -} - -func TestQemuHypervisor_BuildCommand_Good_VMDKFormat(t *testing.T) { - q := NewQemuHypervisor() - - ctx := context.Background() - opts := &HypervisorOptions{Memory: 1024, CPUs: 1} - - cmd, err := q.BuildCommand(ctx, "/path/to/image.vmdk", opts) - require.NoError(t, err) - - // Check that the drive format is vmdk - found := false - for _, arg := range cmd.Args { - if arg == "file=/path/to/image.vmdk,format=vmdk" { - found = true - break - } - } - assert.True(t, found, "Should have vmdk drive argument") -} - -func TestQemuHypervisor_BuildCommand_Good_RawFormat(t *testing.T) { - q := NewQemuHypervisor() - - ctx := context.Background() - opts := &HypervisorOptions{Memory: 1024, CPUs: 1} - - cmd, err := q.BuildCommand(ctx, "/path/to/image.raw", opts) - require.NoError(t, err) - - // Check that the drive format is raw - found := false - for _, arg := range cmd.Args { - if arg == "file=/path/to/image.raw,format=raw" { - found = true - break - } - } - assert.True(t, found, "Should have raw drive argument") -} - -func TestHyperkitHypervisor_BuildCommand_Good_WithPorts(t *testing.T) { - h := NewHyperkitHypervisor() - - ctx := context.Background() - opts := &HypervisorOptions{ - Memory: 1024, - CPUs: 2, - SSHPort: 2222, - Ports: map[int]int{8080: 80}, - } - - cmd, err := h.BuildCommand(ctx, "/path/to/image.iso", opts) - require.NoError(t, err) - assert.NotNil(t, cmd) - - // Verify it creates a command with memory and CPU args - args := cmd.Args - assert.Contains(t, args, "-m") - assert.Contains(t, args, "1024M") - assert.Contains(t, args, "-c") - assert.Contains(t, args, "2") -} - -func TestHyperkitHypervisor_BuildCommand_Good_QCow2Format(t *testing.T) { - h := NewHyperkitHypervisor() - - ctx := context.Background() - opts := &HypervisorOptions{Memory: 1024, CPUs: 1} - - cmd, err := h.BuildCommand(ctx, "/path/to/image.qcow2", opts) - require.NoError(t, err) - assert.NotNil(t, cmd) -} - -func 
TestHyperkitHypervisor_BuildCommand_Good_RawFormat(t *testing.T) { - h := NewHyperkitHypervisor() - - ctx := context.Background() - opts := &HypervisorOptions{Memory: 1024, CPUs: 1} - - cmd, err := h.BuildCommand(ctx, "/path/to/image.raw", opts) - require.NoError(t, err) - assert.NotNil(t, cmd) -} - -func TestHyperkitHypervisor_BuildCommand_Good_NoPorts(t *testing.T) { - h := NewHyperkitHypervisor() - - ctx := context.Background() - opts := &HypervisorOptions{ - Memory: 512, - CPUs: 1, - SSHPort: 0, // No SSH port - Ports: nil, - } - - cmd, err := h.BuildCommand(ctx, "/path/to/image.iso", opts) - require.NoError(t, err) - assert.NotNil(t, cmd) -} - -func TestQemuHypervisor_BuildCommand_Good_NoSSHPort(t *testing.T) { - q := NewQemuHypervisor() - - ctx := context.Background() - opts := &HypervisorOptions{ - Memory: 512, - CPUs: 1, - SSHPort: 0, // No SSH port - Ports: nil, - } - - cmd, err := q.BuildCommand(ctx, "/path/to/image.iso", opts) - require.NoError(t, err) - assert.NotNil(t, cmd) -} - -func TestQemuHypervisor_BuildCommand_Bad_UnknownFormat(t *testing.T) { - q := NewQemuHypervisor() - - ctx := context.Background() - opts := &HypervisorOptions{Memory: 1024, CPUs: 1} - - _, err := q.BuildCommand(ctx, "/path/to/image.txt", opts) - assert.Error(t, err) - assert.Contains(t, err.Error(), "unknown image format") -} - -func TestHyperkitHypervisor_BuildCommand_Bad_UnknownFormat(t *testing.T) { - h := NewHyperkitHypervisor() - - ctx := context.Background() - opts := &HypervisorOptions{Memory: 1024, CPUs: 1} - - _, err := h.BuildCommand(ctx, "/path/to/image.unknown", opts) - assert.Error(t, err) - assert.Contains(t, err.Error(), "unknown image format") -} - -func TestHyperkitHypervisor_Name_Good(t *testing.T) { - h := NewHyperkitHypervisor() - assert.Equal(t, "hyperkit", h.Name()) -} - -func TestHyperkitHypervisor_BuildCommand_Good_ISOFormat(t *testing.T) { - h := NewHyperkitHypervisor() - - ctx := context.Background() - opts := &HypervisorOptions{ - Memory: 1024, - 
CPUs: 2, - SSHPort: 2222, - } - - cmd, err := h.BuildCommand(ctx, "/path/to/image.iso", opts) - require.NoError(t, err) - assert.NotNil(t, cmd) - - args := cmd.Args - assert.Contains(t, args, "-m") - assert.Contains(t, args, "1024M") - assert.Contains(t, args, "-c") - assert.Contains(t, args, "2") -} diff --git a/pkg/container/linuxkit.go b/pkg/container/linuxkit.go deleted file mode 100644 index 8bf34d5..0000000 --- a/pkg/container/linuxkit.go +++ /dev/null @@ -1,433 +0,0 @@ -package container - -import ( - "bufio" - "context" - "fmt" - "io" - "os" - "os/exec" - "syscall" - "time" -) - -// LinuxKitManager implements the Manager interface for LinuxKit VMs. -type LinuxKitManager struct { - state *State - hypervisor Hypervisor -} - -// NewLinuxKitManager creates a new LinuxKit manager with auto-detected hypervisor. -func NewLinuxKitManager() (*LinuxKitManager, error) { - statePath, err := DefaultStatePath() - if err != nil { - return nil, fmt.Errorf("failed to determine state path: %w", err) - } - - state, err := LoadState(statePath) - if err != nil { - return nil, fmt.Errorf("failed to load state: %w", err) - } - - hypervisor, err := DetectHypervisor() - if err != nil { - return nil, err - } - - return &LinuxKitManager{ - state: state, - hypervisor: hypervisor, - }, nil -} - -// NewLinuxKitManagerWithHypervisor creates a manager with a specific hypervisor. -func NewLinuxKitManagerWithHypervisor(state *State, hypervisor Hypervisor) *LinuxKitManager { - return &LinuxKitManager{ - state: state, - hypervisor: hypervisor, - } -} - -// Run starts a new LinuxKit VM from the given image. 
-func (m *LinuxKitManager) Run(ctx context.Context, image string, opts RunOptions) (*Container, error) { - // Validate image exists - if _, err := os.Stat(image); err != nil { - return nil, fmt.Errorf("image not found: %s", image) - } - - // Detect image format - format := DetectImageFormat(image) - if format == FormatUnknown { - return nil, fmt.Errorf("unsupported image format: %s", image) - } - - // Generate container ID - id, err := GenerateID() - if err != nil { - return nil, fmt.Errorf("failed to generate container ID: %w", err) - } - - // Apply defaults - if opts.Memory <= 0 { - opts.Memory = 1024 - } - if opts.CPUs <= 0 { - opts.CPUs = 1 - } - if opts.SSHPort <= 0 { - opts.SSHPort = 2222 - } - - // Use name or generate from ID - name := opts.Name - if name == "" { - name = id[:8] - } - - // Ensure logs directory exists - if err := EnsureLogsDir(); err != nil { - return nil, fmt.Errorf("failed to create logs directory: %w", err) - } - - // Get log file path - logPath, err := LogPath(id) - if err != nil { - return nil, fmt.Errorf("failed to determine log path: %w", err) - } - - // Build hypervisor options - hvOpts := &HypervisorOptions{ - Memory: opts.Memory, - CPUs: opts.CPUs, - LogFile: logPath, - SSHPort: opts.SSHPort, - Ports: opts.Ports, - Volumes: opts.Volumes, - Detach: opts.Detach, - } - - // Build the command - cmd, err := m.hypervisor.BuildCommand(ctx, image, hvOpts) - if err != nil { - return nil, fmt.Errorf("failed to build hypervisor command: %w", err) - } - - // Create log file - logFile, err := os.Create(logPath) - if err != nil { - return nil, fmt.Errorf("failed to create log file: %w", err) - } - - // Create container record - container := &Container{ - ID: id, - Name: name, - Image: image, - Status: StatusRunning, - StartedAt: time.Now(), - Ports: opts.Ports, - Memory: opts.Memory, - CPUs: opts.CPUs, - } - - if opts.Detach { - // Run in background - cmd.Stdout = logFile - cmd.Stderr = logFile - - // Start the process - if err := cmd.Start(); 
err != nil { - logFile.Close() - return nil, fmt.Errorf("failed to start VM: %w", err) - } - - container.PID = cmd.Process.Pid - - // Save state - if err := m.state.Add(container); err != nil { - // Try to kill the process we just started - cmd.Process.Kill() - logFile.Close() - return nil, fmt.Errorf("failed to save state: %w", err) - } - - // Close log file handle (process has its own) - logFile.Close() - - // Start a goroutine to wait for process exit and update state - go m.waitForExit(container.ID, cmd) - - return container, nil - } - - // Run in foreground - // Tee output to both log file and stdout - stdout, err := cmd.StdoutPipe() - if err != nil { - logFile.Close() - return nil, fmt.Errorf("failed to get stdout pipe: %w", err) - } - - stderr, err := cmd.StderrPipe() - if err != nil { - logFile.Close() - return nil, fmt.Errorf("failed to get stderr pipe: %w", err) - } - - if err := cmd.Start(); err != nil { - logFile.Close() - return nil, fmt.Errorf("failed to start VM: %w", err) - } - - container.PID = cmd.Process.Pid - - // Save state before waiting - if err := m.state.Add(container); err != nil { - cmd.Process.Kill() - logFile.Close() - return nil, fmt.Errorf("failed to save state: %w", err) - } - - // Copy output to both log and stdout - go func() { - mw := io.MultiWriter(logFile, os.Stdout) - io.Copy(mw, stdout) - }() - go func() { - mw := io.MultiWriter(logFile, os.Stderr) - io.Copy(mw, stderr) - }() - - // Wait for the process to complete - if err := cmd.Wait(); err != nil { - container.Status = StatusError - } else { - container.Status = StatusStopped - } - - logFile.Close() - m.state.Update(container) - - return container, nil -} - -// waitForExit monitors a detached process and updates state when it exits. 
-func (m *LinuxKitManager) waitForExit(id string, cmd *exec.Cmd) { - cmd.Wait() - - container, ok := m.state.Get(id) - if ok { - container.Status = StatusStopped - m.state.Update(container) - } -} - -// Stop stops a running container by sending SIGTERM. -func (m *LinuxKitManager) Stop(ctx context.Context, id string) error { - container, ok := m.state.Get(id) - if !ok { - return fmt.Errorf("container not found: %s", id) - } - - if container.Status != StatusRunning { - return fmt.Errorf("container is not running: %s", id) - } - - // Find the process - process, err := os.FindProcess(container.PID) - if err != nil { - // Process doesn't exist, update state - container.Status = StatusStopped - m.state.Update(container) - return nil - } - - // Send SIGTERM - if err := process.Signal(syscall.SIGTERM); err != nil { - // Process might already be gone - container.Status = StatusStopped - m.state.Update(container) - return nil - } - - // Wait for graceful shutdown with timeout - done := make(chan struct{}) - go func() { - process.Wait() - close(done) - }() - - select { - case <-done: - // Process exited gracefully - case <-time.After(10 * time.Second): - // Force kill - process.Signal(syscall.SIGKILL) - <-done - case <-ctx.Done(): - // Context cancelled - process.Signal(syscall.SIGKILL) - return ctx.Err() - } - - container.Status = StatusStopped - return m.state.Update(container) -} - -// List returns all known containers, verifying process state. -func (m *LinuxKitManager) List(ctx context.Context) ([]*Container, error) { - containers := m.state.All() - - // Verify each running container's process is still alive - for _, c := range containers { - if c.Status == StatusRunning { - if !isProcessRunning(c.PID) { - c.Status = StatusStopped - m.state.Update(c) - } - } - } - - return containers, nil -} - -// isProcessRunning checks if a process with the given PID is still running. 
-func isProcessRunning(pid int) bool { - process, err := os.FindProcess(pid) - if err != nil { - return false - } - - // On Unix, FindProcess always succeeds, so we need to send signal 0 to check - err = process.Signal(syscall.Signal(0)) - return err == nil -} - -// Logs returns a reader for the container's log output. -func (m *LinuxKitManager) Logs(ctx context.Context, id string, follow bool) (io.ReadCloser, error) { - _, ok := m.state.Get(id) - if !ok { - return nil, fmt.Errorf("container not found: %s", id) - } - - logPath, err := LogPath(id) - if err != nil { - return nil, fmt.Errorf("failed to determine log path: %w", err) - } - - if _, err := os.Stat(logPath); err != nil { - if os.IsNotExist(err) { - return nil, fmt.Errorf("no logs available for container: %s", id) - } - return nil, err - } - - if !follow { - // Simple case: just open and return the file - return os.Open(logPath) - } - - // Follow mode: create a reader that tails the file - return newFollowReader(ctx, logPath) -} - -// followReader implements io.ReadCloser for following log files. 
-type followReader struct { - file *os.File - ctx context.Context - cancel context.CancelFunc - reader *bufio.Reader -} - -func newFollowReader(ctx context.Context, path string) (*followReader, error) { - file, err := os.Open(path) - if err != nil { - return nil, err - } - - // Seek to end - file.Seek(0, io.SeekEnd) - - ctx, cancel := context.WithCancel(ctx) - - return &followReader{ - file: file, - ctx: ctx, - cancel: cancel, - reader: bufio.NewReader(file), - }, nil -} - -func (f *followReader) Read(p []byte) (int, error) { - for { - select { - case <-f.ctx.Done(): - return 0, io.EOF - default: - } - - n, err := f.reader.Read(p) - if n > 0 { - return n, nil - } - if err != nil && err != io.EOF { - return 0, err - } - - // No data available, wait a bit and try again - select { - case <-f.ctx.Done(): - return 0, io.EOF - case <-time.After(100 * time.Millisecond): - // Reset reader to pick up new data - f.reader.Reset(f.file) - } - } -} - -func (f *followReader) Close() error { - f.cancel() - return f.file.Close() -} - -// Exec executes a command inside the container via SSH. -func (m *LinuxKitManager) Exec(ctx context.Context, id string, cmd []string) error { - container, ok := m.state.Get(id) - if !ok { - return fmt.Errorf("container not found: %s", id) - } - - if container.Status != StatusRunning { - return fmt.Errorf("container is not running: %s", id) - } - - // Default SSH port - sshPort := 2222 - - // Build SSH command - sshArgs := []string{ - "-p", fmt.Sprintf("%d", sshPort), - "-o", "StrictHostKeyChecking=no", - "-o", "UserKnownHostsFile=/dev/null", - "-o", "LogLevel=ERROR", - "root@localhost", - } - sshArgs = append(sshArgs, cmd...) - - sshCmd := exec.CommandContext(ctx, "ssh", sshArgs...) - sshCmd.Stdin = os.Stdin - sshCmd.Stdout = os.Stdout - sshCmd.Stderr = os.Stderr - - return sshCmd.Run() -} - -// State returns the manager's state (for testing). 
-func (m *LinuxKitManager) State() *State { - return m.state -} - -// Hypervisor returns the manager's hypervisor (for testing). -func (m *LinuxKitManager) Hypervisor() Hypervisor { - return m.hypervisor -} diff --git a/pkg/container/linuxkit_test.go b/pkg/container/linuxkit_test.go deleted file mode 100644 index 5c65393..0000000 --- a/pkg/container/linuxkit_test.go +++ /dev/null @@ -1,781 +0,0 @@ -package container - -import ( - "context" - "os" - "os/exec" - "path/filepath" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// MockHypervisor is a mock implementation for testing. -type MockHypervisor struct { - name string - available bool - buildErr error - lastImage string - lastOpts *HypervisorOptions - commandToRun string -} - -func NewMockHypervisor() *MockHypervisor { - return &MockHypervisor{ - name: "mock", - available: true, - commandToRun: "echo", - } -} - -func (m *MockHypervisor) Name() string { - return m.name -} - -func (m *MockHypervisor) Available() bool { - return m.available -} - -func (m *MockHypervisor) BuildCommand(ctx context.Context, image string, opts *HypervisorOptions) (*exec.Cmd, error) { - m.lastImage = image - m.lastOpts = opts - if m.buildErr != nil { - return nil, m.buildErr - } - // Return a simple command that exits quickly - return exec.CommandContext(ctx, m.commandToRun, "test"), nil -} - -// newTestManager creates a LinuxKitManager with mock hypervisor for testing. -// Uses manual temp directory management to avoid race conditions with t.TempDir cleanup. 
-func newTestManager(t *testing.T) (*LinuxKitManager, *MockHypervisor, string) { - tmpDir, err := os.MkdirTemp("", "linuxkit-test-*") - require.NoError(t, err) - - // Manual cleanup that handles race conditions with state file writes - t.Cleanup(func() { - // Give any pending file operations time to complete - time.Sleep(10 * time.Millisecond) - _ = os.RemoveAll(tmpDir) - }) - - statePath := filepath.Join(tmpDir, "containers.json") - - state, err := LoadState(statePath) - require.NoError(t, err) - - mock := NewMockHypervisor() - manager := NewLinuxKitManagerWithHypervisor(state, mock) - - return manager, mock, tmpDir -} - -func TestNewLinuxKitManagerWithHypervisor_Good(t *testing.T) { - tmpDir := t.TempDir() - statePath := filepath.Join(tmpDir, "containers.json") - state, _ := LoadState(statePath) - mock := NewMockHypervisor() - - manager := NewLinuxKitManagerWithHypervisor(state, mock) - - assert.NotNil(t, manager) - assert.Equal(t, state, manager.State()) - assert.Equal(t, mock, manager.Hypervisor()) -} - -func TestLinuxKitManager_Run_Good_Detached(t *testing.T) { - manager, mock, tmpDir := newTestManager(t) - - // Create a test image file - imagePath := filepath.Join(tmpDir, "test.iso") - err := os.WriteFile(imagePath, []byte("fake image"), 0644) - require.NoError(t, err) - - // Use a command that runs briefly then exits - mock.commandToRun = "sleep" - - ctx := context.Background() - opts := RunOptions{ - Name: "test-vm", - Detach: true, - Memory: 512, - CPUs: 2, - } - - container, err := manager.Run(ctx, imagePath, opts) - require.NoError(t, err) - - assert.NotEmpty(t, container.ID) - assert.Equal(t, "test-vm", container.Name) - assert.Equal(t, imagePath, container.Image) - assert.Equal(t, StatusRunning, container.Status) - assert.Greater(t, container.PID, 0) - assert.Equal(t, 512, container.Memory) - assert.Equal(t, 2, container.CPUs) - - // Verify hypervisor was called with correct options - assert.Equal(t, imagePath, mock.lastImage) - assert.Equal(t, 512, 
mock.lastOpts.Memory) - assert.Equal(t, 2, mock.lastOpts.CPUs) - - // Clean up - stop the container - time.Sleep(100 * time.Millisecond) -} - -func TestLinuxKitManager_Run_Good_DefaultValues(t *testing.T) { - manager, mock, tmpDir := newTestManager(t) - - imagePath := filepath.Join(tmpDir, "test.qcow2") - err := os.WriteFile(imagePath, []byte("fake image"), 0644) - require.NoError(t, err) - - ctx := context.Background() - opts := RunOptions{Detach: true} - - container, err := manager.Run(ctx, imagePath, opts) - require.NoError(t, err) - - // Check defaults were applied - assert.Equal(t, 1024, mock.lastOpts.Memory) - assert.Equal(t, 1, mock.lastOpts.CPUs) - assert.Equal(t, 2222, mock.lastOpts.SSHPort) - - // Name should default to first 8 chars of ID - assert.Equal(t, container.ID[:8], container.Name) - - // Wait for the mock process to complete to avoid temp dir cleanup issues - time.Sleep(50 * time.Millisecond) -} - -func TestLinuxKitManager_Run_Bad_ImageNotFound(t *testing.T) { - manager, _, _ := newTestManager(t) - - ctx := context.Background() - opts := RunOptions{Detach: true} - - _, err := manager.Run(ctx, "/nonexistent/image.iso", opts) - assert.Error(t, err) - assert.Contains(t, err.Error(), "image not found") -} - -func TestLinuxKitManager_Run_Bad_UnsupportedFormat(t *testing.T) { - manager, _, tmpDir := newTestManager(t) - - imagePath := filepath.Join(tmpDir, "test.txt") - err := os.WriteFile(imagePath, []byte("not an image"), 0644) - require.NoError(t, err) - - ctx := context.Background() - opts := RunOptions{Detach: true} - - _, err = manager.Run(ctx, imagePath, opts) - assert.Error(t, err) - assert.Contains(t, err.Error(), "unsupported image format") -} - -func TestLinuxKitManager_Stop_Good(t *testing.T) { - manager, _, _ := newTestManager(t) - - // Add a fake running container with a non-existent PID - // The Stop function should handle this gracefully - container := &Container{ - ID: "abc12345", - Status: StatusRunning, - PID: 999999, // Non-existent 
PID - StartedAt: time.Now(), - } - manager.State().Add(container) - - ctx := context.Background() - err := manager.Stop(ctx, "abc12345") - - // Stop should succeed (process doesn't exist, so container is marked stopped) - assert.NoError(t, err) - - // Verify the container status was updated - c, ok := manager.State().Get("abc12345") - assert.True(t, ok) - assert.Equal(t, StatusStopped, c.Status) -} - -func TestLinuxKitManager_Stop_Bad_NotFound(t *testing.T) { - manager, _, _ := newTestManager(t) - - ctx := context.Background() - err := manager.Stop(ctx, "nonexistent") - - assert.Error(t, err) - assert.Contains(t, err.Error(), "container not found") -} - -func TestLinuxKitManager_Stop_Bad_NotRunning(t *testing.T) { - manager, _, tmpDir := newTestManager(t) - statePath := filepath.Join(tmpDir, "containers.json") - state, _ := LoadState(statePath) - manager = NewLinuxKitManagerWithHypervisor(state, NewMockHypervisor()) - - container := &Container{ - ID: "abc12345", - Status: StatusStopped, - } - state.Add(container) - - ctx := context.Background() - err := manager.Stop(ctx, "abc12345") - - assert.Error(t, err) - assert.Contains(t, err.Error(), "not running") -} - -func TestLinuxKitManager_List_Good(t *testing.T) { - manager, _, tmpDir := newTestManager(t) - statePath := filepath.Join(tmpDir, "containers.json") - state, _ := LoadState(statePath) - manager = NewLinuxKitManagerWithHypervisor(state, NewMockHypervisor()) - - state.Add(&Container{ID: "aaa11111", Status: StatusStopped}) - state.Add(&Container{ID: "bbb22222", Status: StatusStopped}) - - ctx := context.Background() - containers, err := manager.List(ctx) - - require.NoError(t, err) - assert.Len(t, containers, 2) -} - -func TestLinuxKitManager_List_Good_VerifiesRunningStatus(t *testing.T) { - manager, _, tmpDir := newTestManager(t) - statePath := filepath.Join(tmpDir, "containers.json") - state, _ := LoadState(statePath) - manager = NewLinuxKitManagerWithHypervisor(state, NewMockHypervisor()) - - // Add a 
"running" container with a fake PID that doesn't exist - state.Add(&Container{ - ID: "abc12345", - Status: StatusRunning, - PID: 999999, // PID that almost certainly doesn't exist - }) - - ctx := context.Background() - containers, err := manager.List(ctx) - - require.NoError(t, err) - assert.Len(t, containers, 1) - // Status should have been updated to stopped since PID doesn't exist - assert.Equal(t, StatusStopped, containers[0].Status) -} - -func TestLinuxKitManager_Logs_Good(t *testing.T) { - manager, _, tmpDir := newTestManager(t) - - // Create a log file manually - logsDir := filepath.Join(tmpDir, "logs") - os.MkdirAll(logsDir, 0755) - - container := &Container{ID: "abc12345"} - manager.State().Add(container) - - // Override the default logs dir for testing by creating the log file - // at the expected location - logContent := "test log content\nline 2\n" - logPath, _ := LogPath("abc12345") - os.MkdirAll(filepath.Dir(logPath), 0755) - os.WriteFile(logPath, []byte(logContent), 0644) - - ctx := context.Background() - reader, err := manager.Logs(ctx, "abc12345", false) - - require.NoError(t, err) - defer reader.Close() - - buf := make([]byte, 1024) - n, _ := reader.Read(buf) - assert.Equal(t, logContent, string(buf[:n])) -} - -func TestLinuxKitManager_Logs_Bad_NotFound(t *testing.T) { - manager, _, _ := newTestManager(t) - - ctx := context.Background() - _, err := manager.Logs(ctx, "nonexistent", false) - - assert.Error(t, err) - assert.Contains(t, err.Error(), "container not found") -} - -func TestLinuxKitManager_Logs_Bad_NoLogFile(t *testing.T) { - manager, _, _ := newTestManager(t) - - // Use a unique ID that won't have a log file - uniqueID, _ := GenerateID() - container := &Container{ID: uniqueID} - manager.State().Add(container) - - ctx := context.Background() - reader, err := manager.Logs(ctx, uniqueID, false) - - // If logs existed somehow, clean up the reader - if reader != nil { - reader.Close() - } - - assert.Error(t, err) - if err != nil { - 
assert.Contains(t, err.Error(), "no logs available") - } -} - -func TestLinuxKitManager_Exec_Bad_NotFound(t *testing.T) { - manager, _, _ := newTestManager(t) - - ctx := context.Background() - err := manager.Exec(ctx, "nonexistent", []string{"ls"}) - - assert.Error(t, err) - assert.Contains(t, err.Error(), "container not found") -} - -func TestLinuxKitManager_Exec_Bad_NotRunning(t *testing.T) { - manager, _, _ := newTestManager(t) - - container := &Container{ID: "abc12345", Status: StatusStopped} - manager.State().Add(container) - - ctx := context.Background() - err := manager.Exec(ctx, "abc12345", []string{"ls"}) - - assert.Error(t, err) - assert.Contains(t, err.Error(), "not running") -} - -func TestDetectImageFormat_Good(t *testing.T) { - tests := []struct { - path string - format ImageFormat - }{ - {"/path/to/image.iso", FormatISO}, - {"/path/to/image.ISO", FormatISO}, - {"/path/to/image.qcow2", FormatQCOW2}, - {"/path/to/image.QCOW2", FormatQCOW2}, - {"/path/to/image.vmdk", FormatVMDK}, - {"/path/to/image.raw", FormatRaw}, - {"/path/to/image.img", FormatRaw}, - {"image.iso", FormatISO}, - } - - for _, tt := range tests { - t.Run(tt.path, func(t *testing.T) { - assert.Equal(t, tt.format, DetectImageFormat(tt.path)) - }) - } -} - -func TestDetectImageFormat_Bad_Unknown(t *testing.T) { - tests := []string{ - "/path/to/image.txt", - "/path/to/image", - "noextension", - "/path/to/image.docx", - } - - for _, path := range tests { - t.Run(path, func(t *testing.T) { - assert.Equal(t, FormatUnknown, DetectImageFormat(path)) - }) - } -} - -func TestQemuHypervisor_Name_Good(t *testing.T) { - q := NewQemuHypervisor() - assert.Equal(t, "qemu", q.Name()) -} - -func TestQemuHypervisor_BuildCommand_Good(t *testing.T) { - q := NewQemuHypervisor() - - ctx := context.Background() - opts := &HypervisorOptions{ - Memory: 2048, - CPUs: 4, - SSHPort: 2222, - Ports: map[int]int{8080: 80}, - Detach: true, - } - - cmd, err := q.BuildCommand(ctx, "/path/to/image.iso", opts) - 
require.NoError(t, err) - assert.NotNil(t, cmd) - - // Check command path - assert.Contains(t, cmd.Path, "qemu") - - // Check that args contain expected values - args := cmd.Args - assert.Contains(t, args, "-m") - assert.Contains(t, args, "2048") - assert.Contains(t, args, "-smp") - assert.Contains(t, args, "4") - assert.Contains(t, args, "-nographic") -} - - -func TestLinuxKitManager_Logs_Good_Follow(t *testing.T) { - manager, _, _ := newTestManager(t) - - // Create a unique container ID - uniqueID, _ := GenerateID() - container := &Container{ID: uniqueID} - manager.State().Add(container) - - // Create a log file at the expected location - logPath, err := LogPath(uniqueID) - require.NoError(t, err) - os.MkdirAll(filepath.Dir(logPath), 0755) - - // Write initial content - err = os.WriteFile(logPath, []byte("initial log content\n"), 0644) - require.NoError(t, err) - - // Create a cancellable context - ctx, cancel := context.WithCancel(context.Background()) - - // Get the follow reader - reader, err := manager.Logs(ctx, uniqueID, true) - require.NoError(t, err) - - // Cancel the context to stop the follow - cancel() - - // Read should return EOF after context cancellation - buf := make([]byte, 1024) - _, readErr := reader.Read(buf) - // After context cancel, Read should return EOF - assert.Equal(t, "EOF", readErr.Error()) - - // Close the reader - err = reader.Close() - assert.NoError(t, err) -} - -func TestFollowReader_Read_Good_WithData(t *testing.T) { - tmpDir := t.TempDir() - logPath := filepath.Join(tmpDir, "test.log") - - // Create log file with content - content := "test log line 1\ntest log line 2\n" - err := os.WriteFile(logPath, []byte(content), 0644) - require.NoError(t, err) - - ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) - defer cancel() - - reader, err := newFollowReader(ctx, logPath) - require.NoError(t, err) - defer reader.Close() - - // The followReader seeks to end, so we need to append more content - f, err := 
os.OpenFile(logPath, os.O_APPEND|os.O_WRONLY, 0644) - require.NoError(t, err) - _, err = f.WriteString("new line\n") - require.NoError(t, err) - f.Close() - - // Give the reader time to poll - time.Sleep(150 * time.Millisecond) - - buf := make([]byte, 1024) - n, err := reader.Read(buf) - if err == nil { - assert.Greater(t, n, 0) - } -} - -func TestFollowReader_Read_Good_ContextCancel(t *testing.T) { - tmpDir := t.TempDir() - logPath := filepath.Join(tmpDir, "test.log") - - // Create log file - err := os.WriteFile(logPath, []byte("initial content\n"), 0644) - require.NoError(t, err) - - ctx, cancel := context.WithCancel(context.Background()) - - reader, err := newFollowReader(ctx, logPath) - require.NoError(t, err) - - // Cancel the context - cancel() - - // Read should return EOF - buf := make([]byte, 1024) - _, readErr := reader.Read(buf) - assert.Equal(t, "EOF", readErr.Error()) - - reader.Close() -} - -func TestFollowReader_Close_Good(t *testing.T) { - tmpDir := t.TempDir() - logPath := filepath.Join(tmpDir, "test.log") - - err := os.WriteFile(logPath, []byte("content\n"), 0644) - require.NoError(t, err) - - ctx := context.Background() - reader, err := newFollowReader(ctx, logPath) - require.NoError(t, err) - - err = reader.Close() - assert.NoError(t, err) - - // Reading after close should fail or return EOF - buf := make([]byte, 1024) - _, readErr := reader.Read(buf) - assert.Error(t, readErr) -} - -func TestNewFollowReader_Bad_FileNotFound(t *testing.T) { - ctx := context.Background() - _, err := newFollowReader(ctx, "/nonexistent/path/to/file.log") - - assert.Error(t, err) -} - -func TestLinuxKitManager_Run_Bad_BuildCommandError(t *testing.T) { - manager, mock, tmpDir := newTestManager(t) - - // Create a test image file - imagePath := filepath.Join(tmpDir, "test.iso") - err := os.WriteFile(imagePath, []byte("fake image"), 0644) - require.NoError(t, err) - - // Configure mock to return an error - mock.buildErr = assert.AnError - - ctx := context.Background() - 
opts := RunOptions{Detach: true} - - _, err = manager.Run(ctx, imagePath, opts) - assert.Error(t, err) - assert.Contains(t, err.Error(), "failed to build hypervisor command") -} - -func TestLinuxKitManager_Run_Good_Foreground(t *testing.T) { - manager, mock, tmpDir := newTestManager(t) - - // Create a test image file - imagePath := filepath.Join(tmpDir, "test.iso") - err := os.WriteFile(imagePath, []byte("fake image"), 0644) - require.NoError(t, err) - - // Use echo which exits quickly - mock.commandToRun = "echo" - - ctx := context.Background() - opts := RunOptions{ - Name: "test-foreground", - Detach: false, // Run in foreground - Memory: 512, - CPUs: 1, - } - - container, err := manager.Run(ctx, imagePath, opts) - require.NoError(t, err) - - assert.NotEmpty(t, container.ID) - assert.Equal(t, "test-foreground", container.Name) - // Foreground process should have completed - assert.Equal(t, StatusStopped, container.Status) -} - -func TestLinuxKitManager_Stop_Good_ContextCancelled(t *testing.T) { - manager, mock, tmpDir := newTestManager(t) - - // Create a test image file - imagePath := filepath.Join(tmpDir, "test.iso") - err := os.WriteFile(imagePath, []byte("fake image"), 0644) - require.NoError(t, err) - - // Use a command that takes a long time - mock.commandToRun = "sleep" - - // Start a container - ctx := context.Background() - opts := RunOptions{ - Name: "test-cancel", - Detach: true, - } - - container, err := manager.Run(ctx, imagePath, opts) - require.NoError(t, err) - - // Ensure cleanup happens regardless of test outcome - t.Cleanup(func() { - _ = manager.Stop(context.Background(), container.ID) - }) - - // Create a context that's already cancelled - cancelCtx, cancel := context.WithCancel(context.Background()) - cancel() - - // Stop with cancelled context - err = manager.Stop(cancelCtx, container.ID) - // Should return context error - assert.Error(t, err) - assert.Equal(t, context.Canceled, err) -} - -func TestIsProcessRunning_Good_ExistingProcess(t 
*testing.T) { - // Use our own PID which definitely exists - running := isProcessRunning(os.Getpid()) - assert.True(t, running) -} - -func TestIsProcessRunning_Bad_NonexistentProcess(t *testing.T) { - // Use a PID that almost certainly doesn't exist - running := isProcessRunning(999999) - assert.False(t, running) -} - -func TestLinuxKitManager_Run_Good_WithPortsAndVolumes(t *testing.T) { - manager, mock, tmpDir := newTestManager(t) - - imagePath := filepath.Join(tmpDir, "test.iso") - err := os.WriteFile(imagePath, []byte("fake image"), 0644) - require.NoError(t, err) - - ctx := context.Background() - opts := RunOptions{ - Name: "test-ports", - Detach: true, - Memory: 512, - CPUs: 1, - SSHPort: 2223, - Ports: map[int]int{8080: 80, 443: 443}, - Volumes: map[string]string{"/host/data": "/container/data"}, - } - - container, err := manager.Run(ctx, imagePath, opts) - require.NoError(t, err) - - assert.NotEmpty(t, container.ID) - assert.Equal(t, map[int]int{8080: 80, 443: 443}, container.Ports) - assert.Equal(t, 2223, mock.lastOpts.SSHPort) - assert.Equal(t, map[string]string{"/host/data": "/container/data"}, mock.lastOpts.Volumes) - - time.Sleep(50 * time.Millisecond) -} - -func TestFollowReader_Read_Good_ReaderError(t *testing.T) { - tmpDir := t.TempDir() - logPath := filepath.Join(tmpDir, "test.log") - - // Create log file - err := os.WriteFile(logPath, []byte("content\n"), 0644) - require.NoError(t, err) - - ctx := context.Background() - reader, err := newFollowReader(ctx, logPath) - require.NoError(t, err) - - // Close the underlying file to cause read errors - reader.file.Close() - - // Read should return an error - buf := make([]byte, 1024) - _, readErr := reader.Read(buf) - assert.Error(t, readErr) -} - -func TestLinuxKitManager_Run_Bad_StartError(t *testing.T) { - manager, mock, tmpDir := newTestManager(t) - - imagePath := filepath.Join(tmpDir, "test.iso") - err := os.WriteFile(imagePath, []byte("fake image"), 0644) - require.NoError(t, err) - - // Use a 
command that doesn't exist to cause Start() to fail - mock.commandToRun = "/nonexistent/command/that/does/not/exist" - - ctx := context.Background() - opts := RunOptions{ - Name: "test-start-error", - Detach: true, - } - - _, err = manager.Run(ctx, imagePath, opts) - assert.Error(t, err) - assert.Contains(t, err.Error(), "failed to start VM") -} - -func TestLinuxKitManager_Run_Bad_ForegroundStartError(t *testing.T) { - manager, mock, tmpDir := newTestManager(t) - - imagePath := filepath.Join(tmpDir, "test.iso") - err := os.WriteFile(imagePath, []byte("fake image"), 0644) - require.NoError(t, err) - - // Use a command that doesn't exist to cause Start() to fail - mock.commandToRun = "/nonexistent/command/that/does/not/exist" - - ctx := context.Background() - opts := RunOptions{ - Name: "test-foreground-error", - Detach: false, - } - - _, err = manager.Run(ctx, imagePath, opts) - assert.Error(t, err) - assert.Contains(t, err.Error(), "failed to start VM") -} - -func TestLinuxKitManager_Run_Good_ForegroundWithError(t *testing.T) { - manager, mock, tmpDir := newTestManager(t) - - imagePath := filepath.Join(tmpDir, "test.iso") - err := os.WriteFile(imagePath, []byte("fake image"), 0644) - require.NoError(t, err) - - // Use a command that exits with error - mock.commandToRun = "false" // false command exits with code 1 - - ctx := context.Background() - opts := RunOptions{ - Name: "test-foreground-exit-error", - Detach: false, - } - - container, err := manager.Run(ctx, imagePath, opts) - require.NoError(t, err) // Run itself should succeed - - // Container should be in error state since process exited with error - assert.Equal(t, StatusError, container.Status) -} - -func TestLinuxKitManager_Stop_Good_ProcessExitedWhileRunning(t *testing.T) { - manager, _, _ := newTestManager(t) - - // Add a "running" container with a process that has already exited - // This simulates the race condition where process exits between status check - // and signal send - container := 
&Container{ - ID: "test1234", - Status: StatusRunning, - PID: 999999, // Non-existent PID - StartedAt: time.Now(), - } - manager.State().Add(container) - - ctx := context.Background() - err := manager.Stop(ctx, "test1234") - - // Stop should succeed gracefully - assert.NoError(t, err) - - // Container should be stopped - c, ok := manager.State().Get("test1234") - assert.True(t, ok) - assert.Equal(t, StatusStopped, c.Status) -} diff --git a/pkg/container/state.go b/pkg/container/state.go deleted file mode 100644 index 53ab1e2..0000000 --- a/pkg/container/state.go +++ /dev/null @@ -1,162 +0,0 @@ -package container - -import ( - "encoding/json" - "os" - "path/filepath" - "sync" -) - -// State manages persistent container state. -type State struct { - // Containers is a map of container ID to Container. - Containers map[string]*Container `json:"containers"` - - mu sync.RWMutex - filePath string -} - -// DefaultStateDir returns the default directory for state files (~/.core). -func DefaultStateDir() (string, error) { - home, err := os.UserHomeDir() - if err != nil { - return "", err - } - return filepath.Join(home, ".core"), nil -} - -// DefaultStatePath returns the default path for the state file. -func DefaultStatePath() (string, error) { - dir, err := DefaultStateDir() - if err != nil { - return "", err - } - return filepath.Join(dir, "containers.json"), nil -} - -// DefaultLogsDir returns the default directory for container logs. -func DefaultLogsDir() (string, error) { - dir, err := DefaultStateDir() - if err != nil { - return "", err - } - return filepath.Join(dir, "logs"), nil -} - -// NewState creates a new State instance. -func NewState(filePath string) *State { - return &State{ - Containers: make(map[string]*Container), - filePath: filePath, - } -} - -// LoadState loads the state from the given file path. -// If the file doesn't exist, returns an empty state. 
-func LoadState(filePath string) (*State, error) { - state := NewState(filePath) - - data, err := os.ReadFile(filePath) - if err != nil { - if os.IsNotExist(err) { - return state, nil - } - return nil, err - } - - if err := json.Unmarshal(data, state); err != nil { - return nil, err - } - - return state, nil -} - -// SaveState persists the state to the configured file path. -func (s *State) SaveState() error { - s.mu.RLock() - defer s.mu.RUnlock() - - // Ensure the directory exists - dir := filepath.Dir(s.filePath) - if err := os.MkdirAll(dir, 0755); err != nil { - return err - } - - data, err := json.MarshalIndent(s, "", " ") - if err != nil { - return err - } - - return os.WriteFile(s.filePath, data, 0644) -} - -// Add adds a container to the state and persists it. -func (s *State) Add(c *Container) error { - s.mu.Lock() - s.Containers[c.ID] = c - s.mu.Unlock() - - return s.SaveState() -} - -// Get retrieves a container by ID. -func (s *State) Get(id string) (*Container, bool) { - s.mu.RLock() - defer s.mu.RUnlock() - - c, ok := s.Containers[id] - return c, ok -} - -// Update updates a container in the state and persists it. -func (s *State) Update(c *Container) error { - s.mu.Lock() - s.Containers[c.ID] = c - s.mu.Unlock() - - return s.SaveState() -} - -// Remove removes a container from the state and persists it. -func (s *State) Remove(id string) error { - s.mu.Lock() - delete(s.Containers, id) - s.mu.Unlock() - - return s.SaveState() -} - -// All returns all containers in the state. -func (s *State) All() []*Container { - s.mu.RLock() - defer s.mu.RUnlock() - - containers := make([]*Container, 0, len(s.Containers)) - for _, c := range s.Containers { - containers = append(containers, c) - } - return containers -} - -// FilePath returns the path to the state file. -func (s *State) FilePath() string { - return s.filePath -} - -// LogPath returns the log file path for a given container ID. 
-func LogPath(id string) (string, error) { - logsDir, err := DefaultLogsDir() - if err != nil { - return "", err - } - return filepath.Join(logsDir, id+".log"), nil -} - -// EnsureLogsDir ensures the logs directory exists. -func EnsureLogsDir() error { - logsDir, err := DefaultLogsDir() - if err != nil { - return err - } - return os.MkdirAll(logsDir, 0755) -} diff --git a/pkg/container/state_test.go b/pkg/container/state_test.go deleted file mode 100644 index cf4bf5f..0000000 --- a/pkg/container/state_test.go +++ /dev/null @@ -1,222 +0,0 @@ -package container - -import ( - "os" - "path/filepath" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestNewState_Good(t *testing.T) { - state := NewState("/tmp/test-state.json") - - assert.NotNil(t, state) - assert.NotNil(t, state.Containers) - assert.Equal(t, "/tmp/test-state.json", state.FilePath()) -} - -func TestLoadState_Good_NewFile(t *testing.T) { - // Test loading from non-existent file - tmpDir := t.TempDir() - statePath := filepath.Join(tmpDir, "containers.json") - - state, err := LoadState(statePath) - - require.NoError(t, err) - assert.NotNil(t, state) - assert.Empty(t, state.Containers) -} - -func TestLoadState_Good_ExistingFile(t *testing.T) { - tmpDir := t.TempDir() - statePath := filepath.Join(tmpDir, "containers.json") - - // Create a state file with data - content := `{ - "containers": { - "abc12345": { - "id": "abc12345", - "name": "test-container", - "image": "/path/to/image.iso", - "status": "running", - "pid": 12345, - "started_at": "2024-01-01T00:00:00Z" - } - } - }` - err := os.WriteFile(statePath, []byte(content), 0644) - require.NoError(t, err) - - state, err := LoadState(statePath) - - require.NoError(t, err) - assert.Len(t, state.Containers, 1) - - c, ok := state.Get("abc12345") - assert.True(t, ok) - assert.Equal(t, "test-container", c.Name) - assert.Equal(t, StatusRunning, c.Status) -} - -func TestLoadState_Bad_InvalidJSON(t 
*testing.T) { - tmpDir := t.TempDir() - statePath := filepath.Join(tmpDir, "containers.json") - - // Create invalid JSON - err := os.WriteFile(statePath, []byte("invalid json{"), 0644) - require.NoError(t, err) - - _, err = LoadState(statePath) - assert.Error(t, err) -} - -func TestState_Add_Good(t *testing.T) { - tmpDir := t.TempDir() - statePath := filepath.Join(tmpDir, "containers.json") - state := NewState(statePath) - - container := &Container{ - ID: "abc12345", - Name: "test", - Image: "/path/to/image.iso", - Status: StatusRunning, - PID: 12345, - StartedAt: time.Now(), - } - - err := state.Add(container) - require.NoError(t, err) - - // Verify it's in memory - c, ok := state.Get("abc12345") - assert.True(t, ok) - assert.Equal(t, container.Name, c.Name) - - // Verify file was created - _, err = os.Stat(statePath) - assert.NoError(t, err) -} - -func TestState_Update_Good(t *testing.T) { - tmpDir := t.TempDir() - statePath := filepath.Join(tmpDir, "containers.json") - state := NewState(statePath) - - container := &Container{ - ID: "abc12345", - Status: StatusRunning, - } - state.Add(container) - - // Update status - container.Status = StatusStopped - err := state.Update(container) - require.NoError(t, err) - - // Verify update - c, ok := state.Get("abc12345") - assert.True(t, ok) - assert.Equal(t, StatusStopped, c.Status) -} - -func TestState_Remove_Good(t *testing.T) { - tmpDir := t.TempDir() - statePath := filepath.Join(tmpDir, "containers.json") - state := NewState(statePath) - - container := &Container{ - ID: "abc12345", - } - state.Add(container) - - err := state.Remove("abc12345") - require.NoError(t, err) - - _, ok := state.Get("abc12345") - assert.False(t, ok) -} - -func TestState_Get_Bad_NotFound(t *testing.T) { - state := NewState("/tmp/test-state.json") - - _, ok := state.Get("nonexistent") - assert.False(t, ok) -} - -func TestState_All_Good(t *testing.T) { - tmpDir := t.TempDir() - statePath := filepath.Join(tmpDir, "containers.json") - state := 
NewState(statePath) - - state.Add(&Container{ID: "aaa11111"}) - state.Add(&Container{ID: "bbb22222"}) - state.Add(&Container{ID: "ccc33333"}) - - all := state.All() - assert.Len(t, all, 3) -} - -func TestState_SaveState_Good_CreatesDirectory(t *testing.T) { - tmpDir := t.TempDir() - nestedPath := filepath.Join(tmpDir, "nested", "dir", "containers.json") - state := NewState(nestedPath) - - state.Add(&Container{ID: "abc12345"}) - - err := state.SaveState() - require.NoError(t, err) - - // Verify directory was created - _, err = os.Stat(filepath.Dir(nestedPath)) - assert.NoError(t, err) -} - -func TestDefaultStateDir_Good(t *testing.T) { - dir, err := DefaultStateDir() - require.NoError(t, err) - assert.Contains(t, dir, ".core") -} - -func TestDefaultStatePath_Good(t *testing.T) { - path, err := DefaultStatePath() - require.NoError(t, err) - assert.Contains(t, path, "containers.json") -} - -func TestDefaultLogsDir_Good(t *testing.T) { - dir, err := DefaultLogsDir() - require.NoError(t, err) - assert.Contains(t, dir, "logs") -} - -func TestLogPath_Good(t *testing.T) { - path, err := LogPath("abc12345") - require.NoError(t, err) - assert.Contains(t, path, "abc12345.log") -} - -func TestEnsureLogsDir_Good(t *testing.T) { - // This test creates real directories - skip in CI if needed - err := EnsureLogsDir() - assert.NoError(t, err) - - logsDir, _ := DefaultLogsDir() - _, err = os.Stat(logsDir) - assert.NoError(t, err) -} - -func TestGenerateID_Good(t *testing.T) { - id1, err := GenerateID() - require.NoError(t, err) - assert.Len(t, id1, 8) - - id2, err := GenerateID() - require.NoError(t, err) - assert.Len(t, id2, 8) - - // IDs should be different - assert.NotEqual(t, id1, id2) -} diff --git a/pkg/container/templates.go b/pkg/container/templates.go deleted file mode 100644 index b0068a0..0000000 --- a/pkg/container/templates.go +++ /dev/null @@ -1,299 +0,0 @@ -package container - -import ( - "embed" - "fmt" - "os" - "path/filepath" - "regexp" - "strings" -) - -//go:embed 
templates/*.yml -var embeddedTemplates embed.FS - -// Template represents a LinuxKit YAML template. -type Template struct { - // Name is the template identifier (e.g., "core-dev", "server-php"). - Name string - // Description is a human-readable description of the template. - Description string - // Path is the file path to the template (relative or absolute). - Path string -} - -// builtinTemplates defines the metadata for embedded templates. -var builtinTemplates = []Template{ - { - Name: "core-dev", - Description: "Development environment with Go, Node.js, PHP, Docker-in-LinuxKit, and SSH access", - Path: "templates/core-dev.yml", - }, - { - Name: "server-php", - Description: "Production PHP server with FrankenPHP, Caddy reverse proxy, and health checks", - Path: "templates/server-php.yml", - }, -} - -// ListTemplates returns all available LinuxKit templates. -// It combines embedded templates with any templates found in the user's -// .core/linuxkit directory. -func ListTemplates() []Template { - templates := make([]Template, len(builtinTemplates)) - copy(templates, builtinTemplates) - - // Check for user templates in .core/linuxkit/ - userTemplatesDir := getUserTemplatesDir() - if userTemplatesDir != "" { - userTemplates := scanUserTemplates(userTemplatesDir) - templates = append(templates, userTemplates...) - } - - return templates -} - -// GetTemplate returns the content of a template by name. -// It first checks embedded templates, then user templates. 
-func GetTemplate(name string) (string, error) { - // Check embedded templates first - for _, t := range builtinTemplates { - if t.Name == name { - content, err := embeddedTemplates.ReadFile(t.Path) - if err != nil { - return "", fmt.Errorf("failed to read embedded template %s: %w", name, err) - } - return string(content), nil - } - } - - // Check user templates - userTemplatesDir := getUserTemplatesDir() - if userTemplatesDir != "" { - templatePath := filepath.Join(userTemplatesDir, name+".yml") - if _, err := os.Stat(templatePath); err == nil { - content, err := os.ReadFile(templatePath) - if err != nil { - return "", fmt.Errorf("failed to read user template %s: %w", name, err) - } - return string(content), nil - } - } - - return "", fmt.Errorf("template not found: %s", name) -} - -// ApplyTemplate applies variable substitution to a template. -// It supports two syntaxes: -// - ${VAR} - required variable, returns error if not provided -// - ${VAR:-default} - variable with default value -func ApplyTemplate(name string, vars map[string]string) (string, error) { - content, err := GetTemplate(name) - if err != nil { - return "", err - } - - return ApplyVariables(content, vars) -} - -// ApplyVariables applies variable substitution to content string. 
-// It supports two syntaxes: -// - ${VAR} - required variable, returns error if not provided -// - ${VAR:-default} - variable with default value -func ApplyVariables(content string, vars map[string]string) (string, error) { - // Pattern for ${VAR:-default} syntax - defaultPattern := regexp.MustCompile(`\$\{([A-Za-z_][A-Za-z0-9_]*):-([^}]*)\}`) - - // Pattern for ${VAR} syntax (no default) - requiredPattern := regexp.MustCompile(`\$\{([A-Za-z_][A-Za-z0-9_]*)\}`) - - // Track missing required variables - var missingVars []string - - // First pass: replace variables with defaults - result := defaultPattern.ReplaceAllStringFunc(content, func(match string) string { - submatch := defaultPattern.FindStringSubmatch(match) - if len(submatch) != 3 { - return match - } - varName := submatch[1] - defaultVal := submatch[2] - - if val, ok := vars[varName]; ok { - return val - } - return defaultVal - }) - - // Second pass: replace required variables and track missing ones - result = requiredPattern.ReplaceAllStringFunc(result, func(match string) string { - submatch := requiredPattern.FindStringSubmatch(match) - if len(submatch) != 2 { - return match - } - varName := submatch[1] - - if val, ok := vars[varName]; ok { - return val - } - missingVars = append(missingVars, varName) - return match // Keep original if missing - }) - - if len(missingVars) > 0 { - return "", fmt.Errorf("missing required variables: %s", strings.Join(missingVars, ", ")) - } - - return result, nil -} - -// ExtractVariables extracts all variable names from a template. -// Returns two slices: required variables and optional variables (with defaults). 
-func ExtractVariables(content string) (required []string, optional map[string]string) { - optional = make(map[string]string) - requiredSet := make(map[string]bool) - - // Pattern for ${VAR:-default} syntax - defaultPattern := regexp.MustCompile(`\$\{([A-Za-z_][A-Za-z0-9_]*):-([^}]*)\}`) - - // Pattern for ${VAR} syntax (no default) - requiredPattern := regexp.MustCompile(`\$\{([A-Za-z_][A-Za-z0-9_]*)\}`) - - // Find optional variables with defaults - matches := defaultPattern.FindAllStringSubmatch(content, -1) - for _, match := range matches { - if len(match) == 3 { - optional[match[1]] = match[2] - } - } - - // Find required variables - matches = requiredPattern.FindAllStringSubmatch(content, -1) - for _, match := range matches { - if len(match) == 2 { - varName := match[1] - // Only add if not already in optional (with default) - if _, hasDefault := optional[varName]; !hasDefault { - requiredSet[varName] = true - } - } - } - - // Convert set to slice - for v := range requiredSet { - required = append(required, v) - } - - return required, optional -} - -// getUserTemplatesDir returns the path to user templates directory. -// Returns empty string if the directory doesn't exist. -func getUserTemplatesDir() string { - // Try workspace-relative .core/linuxkit first - cwd, err := os.Getwd() - if err == nil { - wsDir := filepath.Join(cwd, ".core", "linuxkit") - if info, err := os.Stat(wsDir); err == nil && info.IsDir() { - return wsDir - } - } - - // Try home directory - home, err := os.UserHomeDir() - if err != nil { - return "" - } - - homeDir := filepath.Join(home, ".core", "linuxkit") - if info, err := os.Stat(homeDir); err == nil && info.IsDir() { - return homeDir - } - - return "" -} - -// scanUserTemplates scans a directory for .yml template files. 
-func scanUserTemplates(dir string) []Template { - var templates []Template - - entries, err := os.ReadDir(dir) - if err != nil { - return templates - } - - for _, entry := range entries { - if entry.IsDir() { - continue - } - - name := entry.Name() - if !strings.HasSuffix(name, ".yml") && !strings.HasSuffix(name, ".yaml") { - continue - } - - // Extract template name from filename - templateName := strings.TrimSuffix(strings.TrimSuffix(name, ".yml"), ".yaml") - - // Skip if this is a builtin template name (embedded takes precedence) - isBuiltin := false - for _, bt := range builtinTemplates { - if bt.Name == templateName { - isBuiltin = true - break - } - } - if isBuiltin { - continue - } - - // Read file to extract description from comments - description := extractTemplateDescription(filepath.Join(dir, name)) - if description == "" { - description = "User-defined template" - } - - templates = append(templates, Template{ - Name: templateName, - Description: description, - Path: filepath.Join(dir, name), - }) - } - - return templates -} - -// extractTemplateDescription reads the first comment block from a YAML file -// to use as a description. 
-func extractTemplateDescription(path string) string { - content, err := os.ReadFile(path) - if err != nil { - return "" - } - - lines := strings.Split(string(content), "\n") - var descLines []string - - for _, line := range lines { - trimmed := strings.TrimSpace(line) - if strings.HasPrefix(trimmed, "#") { - // Remove the # and trim - comment := strings.TrimSpace(strings.TrimPrefix(trimmed, "#")) - if comment != "" { - descLines = append(descLines, comment) - // Only take the first meaningful comment line as description - if len(descLines) == 1 { - return comment - } - } - } else if trimmed != "" { - // Hit non-comment content, stop - break - } - } - - if len(descLines) > 0 { - return descLines[0] - } - return "" -} diff --git a/pkg/container/templates/core-dev.yml b/pkg/container/templates/core-dev.yml deleted file mode 100644 index 712e43e..0000000 --- a/pkg/container/templates/core-dev.yml +++ /dev/null @@ -1,121 +0,0 @@ -# Core Development Environment Template -# A full-featured development environment with multiple runtimes -# -# Variables: -# ${SSH_KEY} - SSH public key for access (required) -# ${MEMORY:-2048} - Memory in MB (default: 2048) -# ${CPUS:-2} - Number of CPUs (default: 2) -# ${HOSTNAME:-core-dev} - Hostname for the VM -# ${DATA_SIZE:-10G} - Size of persistent /data volume - -kernel: - image: linuxkit/kernel:6.6.13 - cmdline: "console=tty0 console=ttyS0" - -init: - - linuxkit/init:v1.2.0 - - linuxkit/runc:v1.1.12 - - linuxkit/containerd:v1.7.13 - - linuxkit/ca-certificates:v1.0.0 - -onboot: - - name: sysctl - image: linuxkit/sysctl:v1.0.0 - - name: format - image: linuxkit/format:v1.0.0 - - name: mount - image: linuxkit/mount:v1.0.0 - command: ["/usr/bin/mountie", "/dev/sda1", "/data"] - - name: dhcpcd - image: linuxkit/dhcpcd:v1.0.0 - command: ["/sbin/dhcpcd", "--nobackground", "-f", "/dhcpcd.conf", "-1"] - -onshutdown: - - name: shutdown - image: busybox:latest - command: ["/bin/echo", "Shutting down..."] - -services: - - name: getty - image: 
linuxkit/getty:v1.0.0 - env: - - INSECURE=true - - - name: sshd - image: linuxkit/sshd:v1.2.0 - binds: - - /etc/ssh/authorized_keys:/root/.ssh/authorized_keys - - - name: docker - image: docker:24.0-dind - capabilities: - - all - net: host - pid: host - binds: - - /var/run:/var/run - - /data/docker:/var/lib/docker - rootfsPropagation: shared - - - name: dev-tools - image: alpine:3.19 - capabilities: - - all - net: host - binds: - - /data:/data - command: - - /bin/sh - - -c - - | - # Install development tools - apk add --no-cache \ - git curl wget vim nano htop tmux \ - build-base gcc musl-dev linux-headers \ - openssh-client jq yq - - # Install Go 1.22.0 - wget -q https://go.dev/dl/go1.22.0.linux-amd64.tar.gz - tar -C /usr/local -xzf go1.22.0.linux-amd64.tar.gz - rm go1.22.0.linux-amd64.tar.gz - echo 'export PATH=/usr/local/go/bin:$PATH' >> /etc/profile - - # Install Node.js - apk add --no-cache nodejs npm - - # Install PHP - apk add --no-cache php82 php82-cli php82-curl php82-json php82-mbstring \ - php82-openssl php82-pdo php82-pdo_mysql php82-pdo_pgsql php82-phar \ - php82-session php82-tokenizer php82-xml php82-zip composer - - # Keep container running - tail -f /dev/null - -files: - - path: /etc/hostname - contents: "${HOSTNAME:-core-dev}" - - path: /etc/ssh/authorized_keys - contents: "${SSH_KEY}" - mode: "0600" - - path: /etc/profile.d/dev.sh - contents: | - export PATH=$PATH:/usr/local/go/bin - export GOPATH=/data/go - export PATH=$PATH:$GOPATH/bin - cd /data - mode: "0755" - - path: /etc/motd - contents: | - ================================================ - Core Development Environment - - Runtimes: Go, Node.js, PHP - Tools: git, curl, vim, docker - - Data directory: /data (persistent) - ================================================ - -trust: - org: - - linuxkit - - library diff --git a/pkg/container/templates/server-php.yml b/pkg/container/templates/server-php.yml deleted file mode 100644 index 9db9f74..0000000 --- 
a/pkg/container/templates/server-php.yml +++ /dev/null @@ -1,142 +0,0 @@ -# PHP/FrankenPHP Server Template -# A minimal production-ready PHP server with FrankenPHP and Caddy -# -# Variables: -# ${SSH_KEY} - SSH public key for management access (required) -# ${MEMORY:-512} - Memory in MB (default: 512) -# ${CPUS:-1} - Number of CPUs (default: 1) -# ${HOSTNAME:-php-server} - Hostname for the VM -# ${APP_NAME:-app} - Application name -# ${DOMAIN:-localhost} - Domain for SSL certificates -# ${PHP_MEMORY:-128M} - PHP memory limit - -kernel: - image: linuxkit/kernel:6.6.13 - cmdline: "console=tty0 console=ttyS0" - -init: - - linuxkit/init:v1.2.0 - - linuxkit/runc:v1.1.12 - - linuxkit/containerd:v1.7.13 - - linuxkit/ca-certificates:v1.0.0 - -onboot: - - name: sysctl - image: linuxkit/sysctl:v1.0.0 - - name: dhcpcd - image: linuxkit/dhcpcd:v1.0.0 - command: ["/sbin/dhcpcd", "--nobackground", "-f", "/dhcpcd.conf", "-1"] - -services: - - name: sshd - image: linuxkit/sshd:v1.2.0 - binds: - - /etc/ssh/authorized_keys:/root/.ssh/authorized_keys - - - name: frankenphp - image: dunglas/frankenphp:latest - capabilities: - - CAP_NET_BIND_SERVICE - net: host - binds: - - /app:/app - - /data:/data - - /etc/caddy/Caddyfile:/etc/caddy/Caddyfile - env: - - SERVER_NAME=${DOMAIN:-localhost} - - FRANKENPHP_CONFIG=/etc/caddy/Caddyfile - command: - - frankenphp - - run - - --config - - /etc/caddy/Caddyfile - - - name: healthcheck - image: alpine:3.19 - net: host - command: - - /bin/sh - - -c - - | - apk add --no-cache curl - while true; do - sleep 30 - curl -sf http://localhost/health || echo "Health check failed" - done - -files: - - path: /etc/hostname - contents: "${HOSTNAME:-php-server}" - - path: /etc/ssh/authorized_keys - contents: "${SSH_KEY}" - mode: "0600" - - path: /etc/caddy/Caddyfile - contents: | - { - frankenphp - order php_server before file_server - } - - ${DOMAIN:-localhost} { - root * /app/public - - # Health check endpoint - handle /health { - respond "OK" 200 - } - - # 
PHP handling - php_server - - # Encode responses - encode zstd gzip - - # Security headers - header { - X-Content-Type-Options nosniff - X-Frame-Options DENY - X-XSS-Protection "1; mode=block" - Referrer-Policy strict-origin-when-cross-origin - } - - # Logging - log { - output file /data/logs/access.log - format json - } - } - mode: "0644" - - path: /app/public/index.php - contents: | - 'healthy', - 'app' => '${APP_NAME:-app}', - 'timestamp' => date('c'), - 'php_version' => PHP_VERSION, - ]); - mode: "0644" - - path: /etc/php/php.ini - contents: | - memory_limit = ${PHP_MEMORY:-128M} - max_execution_time = 30 - upload_max_filesize = 64M - post_max_size = 64M - display_errors = Off - log_errors = On - error_log = /data/logs/php_errors.log - mode: "0644" - - path: /data/logs/.gitkeep - contents: "" - -trust: - org: - - linuxkit - - library - - dunglas diff --git a/pkg/container/templates_test.go b/pkg/container/templates_test.go deleted file mode 100644 index 5825863..0000000 --- a/pkg/container/templates_test.go +++ /dev/null @@ -1,583 +0,0 @@ -package container - -import ( - "os" - "path/filepath" - "strings" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestListTemplates_Good(t *testing.T) { - templates := ListTemplates() - - // Should have at least the builtin templates - assert.GreaterOrEqual(t, len(templates), 2) - - // Find the core-dev template - var found bool - for _, tmpl := range templates { - if tmpl.Name == "core-dev" { - found = true - assert.NotEmpty(t, tmpl.Description) - assert.NotEmpty(t, tmpl.Path) - break - } - } - assert.True(t, found, "core-dev template should exist") - - // Find the server-php template - found = false - for _, tmpl := range templates { - if tmpl.Name == "server-php" { - found = true - assert.NotEmpty(t, tmpl.Description) - assert.NotEmpty(t, tmpl.Path) - break - } - } - assert.True(t, found, "server-php template should exist") -} - -func TestGetTemplate_Good_CoreDev(t 
*testing.T) { - content, err := GetTemplate("core-dev") - - require.NoError(t, err) - assert.NotEmpty(t, content) - assert.Contains(t, content, "kernel:") - assert.Contains(t, content, "linuxkit/kernel") - assert.Contains(t, content, "${SSH_KEY}") - assert.Contains(t, content, "services:") -} - -func TestGetTemplate_Good_ServerPhp(t *testing.T) { - content, err := GetTemplate("server-php") - - require.NoError(t, err) - assert.NotEmpty(t, content) - assert.Contains(t, content, "kernel:") - assert.Contains(t, content, "frankenphp") - assert.Contains(t, content, "${SSH_KEY}") - assert.Contains(t, content, "${DOMAIN:-localhost}") -} - -func TestGetTemplate_Bad_NotFound(t *testing.T) { - _, err := GetTemplate("nonexistent-template") - - assert.Error(t, err) - assert.Contains(t, err.Error(), "template not found") -} - -func TestApplyVariables_Good_SimpleSubstitution(t *testing.T) { - content := "Hello ${NAME}, welcome to ${PLACE}!" - vars := map[string]string{ - "NAME": "World", - "PLACE": "Core", - } - - result, err := ApplyVariables(content, vars) - - require.NoError(t, err) - assert.Equal(t, "Hello World, welcome to Core!", result) -} - -func TestApplyVariables_Good_WithDefaults(t *testing.T) { - content := "Memory: ${MEMORY:-1024}MB, CPUs: ${CPUS:-2}" - vars := map[string]string{ - "MEMORY": "2048", - // CPUS not provided, should use default - } - - result, err := ApplyVariables(content, vars) - - require.NoError(t, err) - assert.Equal(t, "Memory: 2048MB, CPUs: 2", result) -} - -func TestApplyVariables_Good_AllDefaults(t *testing.T) { - content := "${HOST:-localhost}:${PORT:-8080}" - vars := map[string]string{} // No vars provided - - result, err := ApplyVariables(content, vars) - - require.NoError(t, err) - assert.Equal(t, "localhost:8080", result) -} - -func TestApplyVariables_Good_MixedSyntax(t *testing.T) { - content := ` -hostname: ${HOSTNAME:-myhost} -ssh_key: ${SSH_KEY} -memory: ${MEMORY:-512} -` - vars := map[string]string{ - "SSH_KEY": "ssh-rsa AAAA...", - 
"HOSTNAME": "custom-host", - } - - result, err := ApplyVariables(content, vars) - - require.NoError(t, err) - assert.Contains(t, result, "hostname: custom-host") - assert.Contains(t, result, "ssh_key: ssh-rsa AAAA...") - assert.Contains(t, result, "memory: 512") -} - -func TestApplyVariables_Good_EmptyDefault(t *testing.T) { - content := "value: ${OPT:-}" - vars := map[string]string{} - - result, err := ApplyVariables(content, vars) - - require.NoError(t, err) - assert.Equal(t, "value: ", result) -} - -func TestApplyVariables_Bad_MissingRequired(t *testing.T) { - content := "SSH Key: ${SSH_KEY}" - vars := map[string]string{} // Missing required SSH_KEY - - _, err := ApplyVariables(content, vars) - - assert.Error(t, err) - assert.Contains(t, err.Error(), "missing required variables") - assert.Contains(t, err.Error(), "SSH_KEY") -} - -func TestApplyVariables_Bad_MultipleMissing(t *testing.T) { - content := "${VAR1} and ${VAR2} and ${VAR3}" - vars := map[string]string{ - "VAR2": "provided", - } - - _, err := ApplyVariables(content, vars) - - assert.Error(t, err) - assert.Contains(t, err.Error(), "missing required variables") - // Should mention both missing vars - errStr := err.Error() - assert.True(t, strings.Contains(errStr, "VAR1") || strings.Contains(errStr, "VAR3")) -} - -func TestApplyTemplate_Good(t *testing.T) { - vars := map[string]string{ - "SSH_KEY": "ssh-rsa AAAA... user@host", - } - - result, err := ApplyTemplate("core-dev", vars) - - require.NoError(t, err) - assert.NotEmpty(t, result) - assert.Contains(t, result, "ssh-rsa AAAA... 
user@host") - // Default values should be applied - assert.Contains(t, result, "core-dev") // HOSTNAME default -} - -func TestApplyTemplate_Bad_TemplateNotFound(t *testing.T) { - vars := map[string]string{ - "SSH_KEY": "test", - } - - _, err := ApplyTemplate("nonexistent", vars) - - assert.Error(t, err) - assert.Contains(t, err.Error(), "template not found") -} - -func TestApplyTemplate_Bad_MissingVariable(t *testing.T) { - // server-php requires SSH_KEY - vars := map[string]string{} // Missing required SSH_KEY - - _, err := ApplyTemplate("server-php", vars) - - assert.Error(t, err) - assert.Contains(t, err.Error(), "missing required variables") -} - -func TestExtractVariables_Good(t *testing.T) { - content := ` -hostname: ${HOSTNAME:-myhost} -ssh_key: ${SSH_KEY} -memory: ${MEMORY:-1024} -cpus: ${CPUS:-2} -api_key: ${API_KEY} -` - required, optional := ExtractVariables(content) - - // Required variables (no default) - assert.Contains(t, required, "SSH_KEY") - assert.Contains(t, required, "API_KEY") - assert.Len(t, required, 2) - - // Optional variables (with defaults) - assert.Equal(t, "myhost", optional["HOSTNAME"]) - assert.Equal(t, "1024", optional["MEMORY"]) - assert.Equal(t, "2", optional["CPUS"]) - assert.Len(t, optional, 3) -} - -func TestExtractVariables_Good_NoVariables(t *testing.T) { - content := "This has no variables at all" - - required, optional := ExtractVariables(content) - - assert.Empty(t, required) - assert.Empty(t, optional) -} - -func TestExtractVariables_Good_OnlyDefaults(t *testing.T) { - content := "${A:-default1} ${B:-default2}" - - required, optional := ExtractVariables(content) - - assert.Empty(t, required) - assert.Len(t, optional, 2) - assert.Equal(t, "default1", optional["A"]) - assert.Equal(t, "default2", optional["B"]) -} - -func TestScanUserTemplates_Good(t *testing.T) { - // Create a temporary directory with template files - tmpDir := t.TempDir() - - // Create a valid template file - templateContent := `# My Custom Template -# A 
custom template for testing -kernel: - image: linuxkit/kernel:6.6 -` - err := os.WriteFile(filepath.Join(tmpDir, "custom.yml"), []byte(templateContent), 0644) - require.NoError(t, err) - - // Create a non-template file (should be ignored) - err = os.WriteFile(filepath.Join(tmpDir, "readme.txt"), []byte("Not a template"), 0644) - require.NoError(t, err) - - templates := scanUserTemplates(tmpDir) - - assert.Len(t, templates, 1) - assert.Equal(t, "custom", templates[0].Name) - assert.Equal(t, "My Custom Template", templates[0].Description) -} - -func TestScanUserTemplates_Good_MultipleTemplates(t *testing.T) { - tmpDir := t.TempDir() - - // Create multiple template files - err := os.WriteFile(filepath.Join(tmpDir, "web.yml"), []byte("# Web Server\nkernel:"), 0644) - require.NoError(t, err) - err = os.WriteFile(filepath.Join(tmpDir, "db.yaml"), []byte("# Database Server\nkernel:"), 0644) - require.NoError(t, err) - - templates := scanUserTemplates(tmpDir) - - assert.Len(t, templates, 2) - - // Check names are extracted correctly - names := make(map[string]bool) - for _, tmpl := range templates { - names[tmpl.Name] = true - } - assert.True(t, names["web"]) - assert.True(t, names["db"]) -} - -func TestScanUserTemplates_Good_EmptyDirectory(t *testing.T) { - tmpDir := t.TempDir() - - templates := scanUserTemplates(tmpDir) - - assert.Empty(t, templates) -} - -func TestScanUserTemplates_Bad_NonexistentDirectory(t *testing.T) { - templates := scanUserTemplates("/nonexistent/path/to/templates") - - assert.Empty(t, templates) -} - -func TestExtractTemplateDescription_Good(t *testing.T) { - tmpDir := t.TempDir() - path := filepath.Join(tmpDir, "test.yml") - - content := `# My Template Description -# More details here -kernel: - image: test -` - err := os.WriteFile(path, []byte(content), 0644) - require.NoError(t, err) - - desc := extractTemplateDescription(path) - - assert.Equal(t, "My Template Description", desc) -} - -func TestExtractTemplateDescription_Good_NoComments(t 
*testing.T) { - tmpDir := t.TempDir() - path := filepath.Join(tmpDir, "test.yml") - - content := `kernel: - image: test -` - err := os.WriteFile(path, []byte(content), 0644) - require.NoError(t, err) - - desc := extractTemplateDescription(path) - - assert.Empty(t, desc) -} - -func TestExtractTemplateDescription_Bad_FileNotFound(t *testing.T) { - desc := extractTemplateDescription("/nonexistent/file.yml") - - assert.Empty(t, desc) -} - -func TestVariablePatternEdgeCases_Good(t *testing.T) { - tests := []struct { - name string - content string - vars map[string]string - expected string - }{ - { - name: "underscore in name", - content: "${MY_VAR:-default}", - vars: map[string]string{"MY_VAR": "value"}, - expected: "value", - }, - { - name: "numbers in name", - content: "${VAR123:-default}", - vars: map[string]string{}, - expected: "default", - }, - { - name: "default with special chars", - content: "${URL:-http://localhost:8080}", - vars: map[string]string{}, - expected: "http://localhost:8080", - }, - { - name: "default with path", - content: "${PATH:-/usr/local/bin}", - vars: map[string]string{}, - expected: "/usr/local/bin", - }, - { - name: "adjacent variables", - content: "${A:-a}${B:-b}${C:-c}", - vars: map[string]string{"B": "X"}, - expected: "aXc", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result, err := ApplyVariables(tt.content, tt.vars) - require.NoError(t, err) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestListTemplates_Good_WithUserTemplates(t *testing.T) { - // Create a workspace directory with user templates - tmpDir := t.TempDir() - coreDir := filepath.Join(tmpDir, ".core", "linuxkit") - err := os.MkdirAll(coreDir, 0755) - require.NoError(t, err) - - // Create a user template - templateContent := `# Custom user template -kernel: - image: linuxkit/kernel:6.6 -` - err = os.WriteFile(filepath.Join(coreDir, "user-custom.yml"), []byte(templateContent), 0644) - require.NoError(t, err) - - // Change to 
the temp directory - oldWd, err := os.Getwd() - require.NoError(t, err) - err = os.Chdir(tmpDir) - require.NoError(t, err) - defer os.Chdir(oldWd) - - templates := ListTemplates() - - // Should have at least the builtin templates plus the user template - assert.GreaterOrEqual(t, len(templates), 3) - - // Check that user template is included - found := false - for _, tmpl := range templates { - if tmpl.Name == "user-custom" { - found = true - assert.Equal(t, "Custom user template", tmpl.Description) - break - } - } - assert.True(t, found, "user-custom template should exist") -} - -func TestGetTemplate_Good_UserTemplate(t *testing.T) { - // Create a workspace directory with user templates - tmpDir := t.TempDir() - coreDir := filepath.Join(tmpDir, ".core", "linuxkit") - err := os.MkdirAll(coreDir, 0755) - require.NoError(t, err) - - // Create a user template - templateContent := `# My user template -kernel: - image: linuxkit/kernel:6.6 -services: - - name: test -` - err = os.WriteFile(filepath.Join(coreDir, "my-user-template.yml"), []byte(templateContent), 0644) - require.NoError(t, err) - - // Change to the temp directory - oldWd, err := os.Getwd() - require.NoError(t, err) - err = os.Chdir(tmpDir) - require.NoError(t, err) - defer os.Chdir(oldWd) - - content, err := GetTemplate("my-user-template") - - require.NoError(t, err) - assert.Contains(t, content, "kernel:") - assert.Contains(t, content, "My user template") -} - -func TestScanUserTemplates_Good_SkipsBuiltinNames(t *testing.T) { - tmpDir := t.TempDir() - - // Create a template with a builtin name (should be skipped) - err := os.WriteFile(filepath.Join(tmpDir, "core-dev.yml"), []byte("# Duplicate\nkernel:"), 0644) - require.NoError(t, err) - - // Create a unique template - err = os.WriteFile(filepath.Join(tmpDir, "unique.yml"), []byte("# Unique\nkernel:"), 0644) - require.NoError(t, err) - - templates := scanUserTemplates(tmpDir) - - // Should only have the unique template, not the builtin name - assert.Len(t, 
templates, 1) - assert.Equal(t, "unique", templates[0].Name) -} - -func TestScanUserTemplates_Good_SkipsDirectories(t *testing.T) { - tmpDir := t.TempDir() - - // Create a subdirectory (should be skipped) - err := os.MkdirAll(filepath.Join(tmpDir, "subdir"), 0755) - require.NoError(t, err) - - // Create a valid template - err = os.WriteFile(filepath.Join(tmpDir, "valid.yml"), []byte("# Valid\nkernel:"), 0644) - require.NoError(t, err) - - templates := scanUserTemplates(tmpDir) - - assert.Len(t, templates, 1) - assert.Equal(t, "valid", templates[0].Name) -} - -func TestScanUserTemplates_Good_YamlExtension(t *testing.T) { - tmpDir := t.TempDir() - - // Create templates with both extensions - err := os.WriteFile(filepath.Join(tmpDir, "template1.yml"), []byte("# Template 1\nkernel:"), 0644) - require.NoError(t, err) - err = os.WriteFile(filepath.Join(tmpDir, "template2.yaml"), []byte("# Template 2\nkernel:"), 0644) - require.NoError(t, err) - - templates := scanUserTemplates(tmpDir) - - assert.Len(t, templates, 2) - - names := make(map[string]bool) - for _, tmpl := range templates { - names[tmpl.Name] = true - } - assert.True(t, names["template1"]) - assert.True(t, names["template2"]) -} - -func TestExtractTemplateDescription_Good_EmptyComment(t *testing.T) { - tmpDir := t.TempDir() - path := filepath.Join(tmpDir, "test.yml") - - // First comment is empty, second has content - content := `# -# Actual description here -kernel: - image: test -` - err := os.WriteFile(path, []byte(content), 0644) - require.NoError(t, err) - - desc := extractTemplateDescription(path) - - assert.Equal(t, "Actual description here", desc) -} - -func TestExtractTemplateDescription_Good_MultipleEmptyComments(t *testing.T) { - tmpDir := t.TempDir() - path := filepath.Join(tmpDir, "test.yml") - - // Multiple empty comments before actual content - content := `# -# -# -# Real description -kernel: - image: test -` - err := os.WriteFile(path, []byte(content), 0644) - require.NoError(t, err) - - desc 
:= extractTemplateDescription(path) - - assert.Equal(t, "Real description", desc) -} - -func TestGetUserTemplatesDir_Good_NoDirectory(t *testing.T) { - // Save current working directory - oldWd, err := os.Getwd() - require.NoError(t, err) - - // Create a temp directory without .core/linuxkit - tmpDir := t.TempDir() - err = os.Chdir(tmpDir) - require.NoError(t, err) - defer os.Chdir(oldWd) - - dir := getUserTemplatesDir() - - // Should return empty string since no templates dir exists - // (unless home dir has one) - assert.True(t, dir == "" || strings.Contains(dir, "linuxkit")) -} - -func TestScanUserTemplates_Good_DefaultDescription(t *testing.T) { - tmpDir := t.TempDir() - - // Create a template without comments - content := `kernel: - image: test -` - err := os.WriteFile(filepath.Join(tmpDir, "nocomment.yml"), []byte(content), 0644) - require.NoError(t, err) - - templates := scanUserTemplates(tmpDir) - - assert.Len(t, templates, 1) - assert.Equal(t, "User-defined template", templates[0].Description) -} diff --git a/pkg/dev/cmd_api.go b/pkg/dev/cmd_api.go deleted file mode 100644 index 559489f..0000000 --- a/pkg/dev/cmd_api.go +++ /dev/null @@ -1,22 +0,0 @@ -package dev - -import ( - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -// addAPICommands adds the 'api' command and its subcommands to the given parent command. -func addAPICommands(parent *cli.Command) { - // Create the 'api' command - apiCmd := &cli.Command{ - Use: "api", - Short: i18n.T("cmd.dev.api.short"), - } - parent.AddCommand(apiCmd) - - // Add the 'sync' command to 'api' - addSyncCommand(apiCmd) - - // TODO: Add the 'test-gen' command to 'api' - // addTestGenCommand(apiCmd) -} diff --git a/pkg/dev/cmd_apply.go b/pkg/dev/cmd_apply.go deleted file mode 100644 index ac03eb9..0000000 --- a/pkg/dev/cmd_apply.go +++ /dev/null @@ -1,289 +0,0 @@ -// cmd_apply.go implements safe command/script execution across repos for AI agents. 
-// -// Usage: -// core dev apply --command="sed -i 's/old/new/g' README.md" -// core dev apply --script="./scripts/update-version.sh" -// core dev apply --command="..." --commit --message="chore: update" - -package dev - -import ( - "context" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/errors" - "github.com/host-uk/core/pkg/git" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/repos" -) - -// Apply command flags -var ( - applyCommand string - applyScript string - applyRepos string - applyCommit bool - applyMessage string - applyCoAuthor string - applyDryRun bool - applyPush bool - applyContinue bool // Continue on error -) - -// addApplyCommand adds the 'apply' command to dev. -func addApplyCommand(parent *cli.Command) { - applyCmd := &cli.Command{ - Use: "apply", - Short: i18n.T("cmd.dev.apply.short"), - Long: i18n.T("cmd.dev.apply.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runApply() - }, - } - - applyCmd.Flags().StringVar(&applyCommand, "command", "", i18n.T("cmd.dev.apply.flag.command")) - applyCmd.Flags().StringVar(&applyScript, "script", "", i18n.T("cmd.dev.apply.flag.script")) - applyCmd.Flags().StringVar(&applyRepos, "repos", "", i18n.T("cmd.dev.apply.flag.repos")) - applyCmd.Flags().BoolVar(&applyCommit, "commit", false, i18n.T("cmd.dev.apply.flag.commit")) - applyCmd.Flags().StringVarP(&applyMessage, "message", "m", "", i18n.T("cmd.dev.apply.flag.message")) - applyCmd.Flags().StringVar(&applyCoAuthor, "co-author", "", i18n.T("cmd.dev.apply.flag.co_author")) - applyCmd.Flags().BoolVar(&applyDryRun, "dry-run", false, i18n.T("cmd.dev.apply.flag.dry_run")) - applyCmd.Flags().BoolVar(&applyPush, "push", false, i18n.T("cmd.dev.apply.flag.push")) - applyCmd.Flags().BoolVar(&applyContinue, "continue", false, i18n.T("cmd.dev.apply.flag.continue")) - - parent.AddCommand(applyCmd) -} - -func runApply() error { - ctx := context.Background() - - // 
Validate inputs - if applyCommand == "" && applyScript == "" { - return errors.E("dev.apply", i18n.T("cmd.dev.apply.error.no_command"), nil) - } - if applyCommand != "" && applyScript != "" { - return errors.E("dev.apply", i18n.T("cmd.dev.apply.error.both_command_script"), nil) - } - if applyCommit && applyMessage == "" { - return errors.E("dev.apply", i18n.T("cmd.dev.apply.error.commit_needs_message"), nil) - } - - // Validate script exists - if applyScript != "" { - if _, err := os.Stat(applyScript); err != nil { - return errors.E("dev.apply", "script not found: "+applyScript, err) - } - } - - // Get target repos - targetRepos, err := getApplyTargetRepos() - if err != nil { - return err - } - - if len(targetRepos) == 0 { - return errors.E("dev.apply", i18n.T("cmd.dev.apply.error.no_repos"), nil) - } - - // Show plan - action := applyCommand - if applyScript != "" { - action = applyScript - } - cli.Print("%s: %s\n", dimStyle.Render(i18n.T("cmd.dev.apply.action")), action) - cli.Print("%s: %d repos\n", dimStyle.Render(i18n.T("cmd.dev.apply.targets")), len(targetRepos)) - if applyDryRun { - cli.Print("%s\n", warningStyle.Render(i18n.T("cmd.dev.apply.dry_run_mode"))) - } - cli.Blank() - - var succeeded, skipped, failed int - - for _, repo := range targetRepos { - repoName := filepath.Base(repo.Path) - - if applyDryRun { - cli.Print(" %s %s\n", dimStyle.Render("[dry-run]"), repoName) - succeeded++ - continue - } - - // Step 1: Run command or script - var cmdErr error - if applyCommand != "" { - cmdErr = runCommandInRepo(ctx, repo.Path, applyCommand) - } else { - cmdErr = runScriptInRepo(ctx, repo.Path, applyScript) - } - - if cmdErr != nil { - cli.Print(" %s %s: %s\n", errorStyle.Render("x"), repoName, cmdErr) - failed++ - if !applyContinue { - return cli.Err("%s", i18n.T("cmd.dev.apply.error.command_failed")) - } - continue - } - - // Step 2: Check if anything changed - statuses := git.Status(ctx, git.StatusOptions{ - Paths: []string{repo.Path}, - Names: 
map[string]string{repo.Path: repoName}, - }) - if len(statuses) == 0 || !statuses[0].IsDirty() { - cli.Print(" %s %s: %s\n", dimStyle.Render("-"), repoName, i18n.T("cmd.dev.apply.no_changes")) - skipped++ - continue - } - - // Step 3: Commit if requested - if applyCommit { - commitMsg := applyMessage - if applyCoAuthor != "" { - commitMsg += "\n\nCo-Authored-By: " + applyCoAuthor - } - - // Stage all changes - if _, err := gitCommandQuiet(ctx, repo.Path, "add", "-A"); err != nil { - cli.Print(" %s %s: stage failed: %s\n", errorStyle.Render("x"), repoName, err) - failed++ - if !applyContinue { - return err - } - continue - } - - // Commit - if _, err := gitCommandQuiet(ctx, repo.Path, "commit", "-m", commitMsg); err != nil { - cli.Print(" %s %s: commit failed: %s\n", errorStyle.Render("x"), repoName, err) - failed++ - if !applyContinue { - return err - } - continue - } - - // Step 4: Push if requested - if applyPush { - if err := safePush(ctx, repo.Path); err != nil { - cli.Print(" %s %s: push failed: %s\n", errorStyle.Render("x"), repoName, err) - failed++ - if !applyContinue { - return err - } - continue - } - } - } - - cli.Print(" %s %s\n", successStyle.Render("v"), repoName) - succeeded++ - } - - // Summary - cli.Blank() - cli.Print("%s: ", i18n.T("cmd.dev.apply.summary")) - if succeeded > 0 { - cli.Print("%s", successStyle.Render(i18n.T("common.count.succeeded", map[string]interface{}{"Count": succeeded}))) - } - if skipped > 0 { - if succeeded > 0 { - cli.Print(", ") - } - cli.Print("%s", dimStyle.Render(i18n.T("common.count.skipped", map[string]interface{}{"Count": skipped}))) - } - if failed > 0 { - if succeeded > 0 || skipped > 0 { - cli.Print(", ") - } - cli.Print("%s", errorStyle.Render(i18n.T("common.count.failed", map[string]interface{}{"Count": failed}))) - } - cli.Blank() - - return nil -} - -// getApplyTargetRepos gets repos to apply command to -func getApplyTargetRepos() ([]*repos.Repo, error) { - // Load registry - registryPath, err := 
repos.FindRegistry() - if err != nil { - return nil, errors.E("dev.apply", "failed to find registry", err) - } - - registry, err := repos.LoadRegistry(registryPath) - if err != nil { - return nil, errors.E("dev.apply", "failed to load registry", err) - } - - // If --repos specified, filter to those - if applyRepos != "" { - repoNames := strings.Split(applyRepos, ",") - nameSet := make(map[string]bool) - for _, name := range repoNames { - nameSet[strings.TrimSpace(name)] = true - } - - var matched []*repos.Repo - for _, repo := range registry.Repos { - if nameSet[repo.Name] { - matched = append(matched, repo) - } - } - return matched, nil - } - - // Return all repos as slice - var all []*repos.Repo - for _, repo := range registry.Repos { - all = append(all, repo) - } - return all, nil -} - -// runCommandInRepo runs a shell command in a repo directory -func runCommandInRepo(ctx context.Context, repoPath, command string) error { - // Use shell to execute command - var cmd *exec.Cmd - if isWindows() { - cmd = exec.CommandContext(ctx, "cmd", "/C", command) - } else { - cmd = exec.CommandContext(ctx, "sh", "-c", command) - } - cmd.Dir = repoPath - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - return cmd.Run() -} - -// runScriptInRepo runs a script in a repo directory -func runScriptInRepo(ctx context.Context, repoPath, scriptPath string) error { - // Get absolute path to script - absScript, err := filepath.Abs(scriptPath) - if err != nil { - return err - } - - var cmd *exec.Cmd - if isWindows() { - cmd = exec.CommandContext(ctx, "cmd", "/C", absScript) - } else { - // Execute script directly to honor shebang - cmd = exec.CommandContext(ctx, absScript) - } - cmd.Dir = repoPath - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - return cmd.Run() -} - -// isWindows returns true if running on Windows -func isWindows() bool { - return os.PathSeparator == '\\' -} diff --git a/pkg/dev/cmd_bundles.go b/pkg/dev/cmd_bundles.go deleted file mode 100644 index 
e2374e2..0000000 --- a/pkg/dev/cmd_bundles.go +++ /dev/null @@ -1,88 +0,0 @@ -package dev - -import ( - "context" - - "github.com/host-uk/core/pkg/agentic" - "github.com/host-uk/core/pkg/framework" - "github.com/host-uk/core/pkg/git" -) - -// WorkBundle contains the Core instance for dev work operations. -type WorkBundle struct { - Core *framework.Core -} - -// WorkBundleOptions configures the work bundle. -type WorkBundleOptions struct { - RegistryPath string - AllowEdit bool // Allow agentic to use Write/Edit tools -} - -// NewWorkBundle creates a bundle for dev work operations. -// Includes: dev (orchestration), git, agentic services. -func NewWorkBundle(opts WorkBundleOptions) (*WorkBundle, error) { - c, err := framework.New( - framework.WithService(NewService(ServiceOptions{ - RegistryPath: opts.RegistryPath, - })), - framework.WithService(git.NewService(git.ServiceOptions{})), - framework.WithService(agentic.NewService(agentic.ServiceOptions{ - AllowEdit: opts.AllowEdit, - })), - framework.WithServiceLock(), - ) - if err != nil { - return nil, err - } - - return &WorkBundle{Core: c}, nil -} - -// Start initialises the bundle services. -func (b *WorkBundle) Start(ctx context.Context) error { - return b.Core.ServiceStartup(ctx, nil) -} - -// Stop shuts down the bundle services. -func (b *WorkBundle) Stop(ctx context.Context) error { - return b.Core.ServiceShutdown(ctx) -} - -// StatusBundle contains the Core instance for status-only operations. -type StatusBundle struct { - Core *framework.Core -} - -// StatusBundleOptions configures the status bundle. -type StatusBundleOptions struct { - RegistryPath string -} - -// NewStatusBundle creates a bundle for status-only operations. -// Includes: dev (orchestration), git services. No agentic - commits not available. 
-func NewStatusBundle(opts StatusBundleOptions) (*StatusBundle, error) { - c, err := framework.New( - framework.WithService(NewService(ServiceOptions{ - RegistryPath: opts.RegistryPath, - })), - framework.WithService(git.NewService(git.ServiceOptions{})), - // No agentic service - TaskCommit will be unhandled - framework.WithServiceLock(), - ) - if err != nil { - return nil, err - } - - return &StatusBundle{Core: c}, nil -} - -// Start initialises the bundle services. -func (b *StatusBundle) Start(ctx context.Context) error { - return b.Core.ServiceStartup(ctx, nil) -} - -// Stop shuts down the bundle services. -func (b *StatusBundle) Stop(ctx context.Context) error { - return b.Core.ServiceShutdown(ctx) -} diff --git a/pkg/dev/cmd_ci.go b/pkg/dev/cmd_ci.go deleted file mode 100644 index 660b2df..0000000 --- a/pkg/dev/cmd_ci.go +++ /dev/null @@ -1,259 +0,0 @@ -package dev - -import ( - "encoding/json" - "errors" - "os" - "os/exec" - "strings" - "time" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/repos" -) - -// CI-specific styles (aliases to shared) -var ( - ciSuccessStyle = cli.SuccessStyle - ciFailureStyle = cli.ErrorStyle - ciPendingStyle = cli.WarningStyle - ciSkippedStyle = cli.DimStyle -) - -// WorkflowRun represents a GitHub Actions workflow run -type WorkflowRun struct { - Name string `json:"name"` - Status string `json:"status"` - Conclusion string `json:"conclusion"` - HeadBranch string `json:"headBranch"` - CreatedAt time.Time `json:"createdAt"` - UpdatedAt time.Time `json:"updatedAt"` - URL string `json:"url"` - - // Added by us - RepoName string `json:"-"` -} - -// CI command flags -var ( - ciRegistryPath string - ciBranch string - ciFailedOnly bool -) - -// addCICommand adds the 'ci' command to the given parent command. 
-func addCICommand(parent *cli.Command) { - ciCmd := &cli.Command{ - Use: "ci", - Short: i18n.T("cmd.dev.ci.short"), - Long: i18n.T("cmd.dev.ci.long"), - RunE: func(cmd *cli.Command, args []string) error { - branch := ciBranch - if branch == "" { - branch = "main" - } - return runCI(ciRegistryPath, branch, ciFailedOnly) - }, - } - - ciCmd.Flags().StringVar(&ciRegistryPath, "registry", "", i18n.T("common.flag.registry")) - ciCmd.Flags().StringVarP(&ciBranch, "branch", "b", "main", i18n.T("cmd.dev.ci.flag.branch")) - ciCmd.Flags().BoolVar(&ciFailedOnly, "failed", false, i18n.T("cmd.dev.ci.flag.failed")) - - parent.AddCommand(ciCmd) -} - -func runCI(registryPath string, branch string, failedOnly bool) error { - // Check gh is available - if _, err := exec.LookPath("gh"); err != nil { - return errors.New(i18n.T("error.gh_not_found")) - } - - // Find or use provided registry - var reg *repos.Registry - var err error - - if registryPath != "" { - reg, err = repos.LoadRegistry(registryPath) - if err != nil { - return cli.Wrap(err, "failed to load registry") - } - } else { - registryPath, err = repos.FindRegistry() - if err == nil { - reg, err = repos.LoadRegistry(registryPath) - if err != nil { - return cli.Wrap(err, "failed to load registry") - } - } else { - cwd, _ := os.Getwd() - reg, err = repos.ScanDirectory(cwd) - if err != nil { - return cli.Wrap(err, "failed to scan directory") - } - } - } - - // Fetch CI status sequentially - var allRuns []WorkflowRun - var fetchErrors []error - var noCI []string - - repoList := reg.List() - for i, repo := range repoList { - repoFullName := cli.Sprintf("%s/%s", reg.Org, repo.Name) - cli.Print("\033[2K\r%s %d/%d %s", dimStyle.Render(i18n.T("i18n.progress.check")), i+1, len(repoList), repo.Name) - - runs, err := fetchWorkflowRuns(repoFullName, repo.Name, branch) - if err != nil { - if strings.Contains(err.Error(), "no workflows") { - noCI = append(noCI, repo.Name) - } else { - fetchErrors = append(fetchErrors, cli.Wrap(err, 
repo.Name)) - } - continue - } - - if len(runs) > 0 { - // Just get the latest run - allRuns = append(allRuns, runs[0]) - } else { - noCI = append(noCI, repo.Name) - } - } - cli.Print("\033[2K\r") // Clear progress line - - // Count by status - var success, failed, pending, other int - for _, run := range allRuns { - switch run.Conclusion { - case "success": - success++ - case "failure": - failed++ - case "": - if run.Status == "in_progress" || run.Status == "queued" { - pending++ - } else { - other++ - } - default: - other++ - } - } - - // Print summary - cli.Blank() - cli.Print("%s", i18n.T("cmd.dev.ci.repos_checked", map[string]interface{}{"Count": len(repoList)})) - if success > 0 { - cli.Print(" * %s", ciSuccessStyle.Render(i18n.T("cmd.dev.ci.passing", map[string]interface{}{"Count": success}))) - } - if failed > 0 { - cli.Print(" * %s", ciFailureStyle.Render(i18n.T("cmd.dev.ci.failing", map[string]interface{}{"Count": failed}))) - } - if pending > 0 { - cli.Print(" * %s", ciPendingStyle.Render(i18n.T("common.count.pending", map[string]interface{}{"Count": pending}))) - } - if len(noCI) > 0 { - cli.Print(" * %s", ciSkippedStyle.Render(i18n.T("cmd.dev.ci.no_ci", map[string]interface{}{"Count": len(noCI)}))) - } - cli.Blank() - cli.Blank() - - // Filter if needed - displayRuns := allRuns - if failedOnly { - displayRuns = nil - for _, run := range allRuns { - if run.Conclusion == "failure" { - displayRuns = append(displayRuns, run) - } - } - } - - // Print details - for _, run := range displayRuns { - printWorkflowRun(run) - } - - // Print errors - if len(fetchErrors) > 0 { - cli.Blank() - for _, err := range fetchErrors { - cli.Print("%s %s\n", errorStyle.Render(i18n.Label("error")), err) - } - } - - return nil -} - -func fetchWorkflowRuns(repoFullName, repoName string, branch string) ([]WorkflowRun, error) { - args := []string{ - "run", "list", - "--repo", repoFullName, - "--branch", branch, - "--limit", "1", - "--json", 
"name,status,conclusion,headBranch,createdAt,updatedAt,url", - } - - cmd := exec.Command("gh", args...) - output, err := cmd.Output() - if err != nil { - if exitErr, ok := err.(*exec.ExitError); ok { - stderr := string(exitErr.Stderr) - return nil, cli.Err("%s", strings.TrimSpace(stderr)) - } - return nil, err - } - - var runs []WorkflowRun - if err := json.Unmarshal(output, &runs); err != nil { - return nil, err - } - - // Tag with repo name - for i := range runs { - runs[i].RepoName = repoName - } - - return runs, nil -} - -func printWorkflowRun(run WorkflowRun) { - // Status icon - var status string - switch run.Conclusion { - case "success": - status = ciSuccessStyle.Render("v") - case "failure": - status = ciFailureStyle.Render("x") - case "": - if run.Status == "in_progress" { - status = ciPendingStyle.Render("*") - } else if run.Status == "queued" { - status = ciPendingStyle.Render("o") - } else { - status = ciSkippedStyle.Render("-") - } - case "skipped": - status = ciSkippedStyle.Render("-") - case "cancelled": - status = ciSkippedStyle.Render("o") - default: - status = ciSkippedStyle.Render("?") - } - - // Workflow name (truncated) - workflowName := cli.Truncate(run.Name, 20) - - // Age - age := cli.FormatAge(run.UpdatedAt) - - cli.Print(" %s %-18s %-22s %s\n", - status, - repoNameStyle.Render(run.RepoName), - dimStyle.Render(workflowName), - issueAgeStyle.Render(age), - ) -} diff --git a/pkg/dev/cmd_commit.go b/pkg/dev/cmd_commit.go deleted file mode 100644 index 55fad3f..0000000 --- a/pkg/dev/cmd_commit.go +++ /dev/null @@ -1,200 +0,0 @@ -package dev - -import ( - "context" - "os" - "path/filepath" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/git" - "github.com/host-uk/core/pkg/i18n" -) - -// Commit command flags -var ( - commitRegistryPath string - commitAll bool -) - -// addCommitCommand adds the 'commit' command to the given parent command. 
-func addCommitCommand(parent *cli.Command) { - commitCmd := &cli.Command{ - Use: "commit", - Short: i18n.T("cmd.dev.commit.short"), - Long: i18n.T("cmd.dev.commit.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runCommit(commitRegistryPath, commitAll) - }, - } - - commitCmd.Flags().StringVar(&commitRegistryPath, "registry", "", i18n.T("common.flag.registry")) - commitCmd.Flags().BoolVar(&commitAll, "all", false, i18n.T("cmd.dev.commit.flag.all")) - - parent.AddCommand(commitCmd) -} - -func runCommit(registryPath string, all bool) error { - ctx := context.Background() - cwd, _ := os.Getwd() - - // Check if current directory is a git repo (single-repo mode) - if registryPath == "" && isGitRepo(cwd) { - return runCommitSingleRepo(ctx, cwd, all) - } - - // Multi-repo mode: find or use provided registry - reg, regDir, err := loadRegistryWithConfig(registryPath) - if err != nil { - return err - } - registryPath = regDir // Use resolved registry directory for relative paths - - // Build paths and names for git operations - var paths []string - names := make(map[string]string) - - for _, repo := range reg.List() { - if repo.IsGitRepo() { - paths = append(paths, repo.Path) - names[repo.Path] = repo.Name - } - } - - if len(paths) == 0 { - cli.Text(i18n.T("cmd.dev.no_git_repos")) - return nil - } - - // Get status for all repos - statuses := git.Status(ctx, git.StatusOptions{ - Paths: paths, - Names: names, - }) - - // Find dirty repos - var dirtyRepos []git.RepoStatus - for _, s := range statuses { - if s.Error == nil && s.IsDirty() { - dirtyRepos = append(dirtyRepos, s) - } - } - - if len(dirtyRepos) == 0 { - cli.Text(i18n.T("cmd.dev.no_changes")) - return nil - } - - // Show dirty repos - cli.Print("\n%s\n\n", i18n.T("cmd.dev.repos_with_changes", map[string]interface{}{"Count": len(dirtyRepos)})) - for _, s := range dirtyRepos { - cli.Print(" %s: ", repoNameStyle.Render(s.Name)) - if s.Modified > 0 { - cli.Print("%s ", 
dirtyStyle.Render(i18n.T("cmd.dev.modified", map[string]interface{}{"Count": s.Modified}))) - } - if s.Untracked > 0 { - cli.Print("%s ", dirtyStyle.Render(i18n.T("cmd.dev.untracked", map[string]interface{}{"Count": s.Untracked}))) - } - if s.Staged > 0 { - cli.Print("%s ", aheadStyle.Render(i18n.T("cmd.dev.staged", map[string]interface{}{"Count": s.Staged}))) - } - cli.Blank() - } - - // Confirm unless --all - if !all { - cli.Blank() - if !cli.Confirm(i18n.T("cmd.dev.confirm_claude_commit")) { - cli.Text(i18n.T("cli.aborted")) - return nil - } - } - - cli.Blank() - - // Commit each dirty repo - var succeeded, failed int - for _, s := range dirtyRepos { - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.dev.committing")), s.Name) - - if err := claudeCommit(ctx, s.Path, s.Name, registryPath); err != nil { - cli.Print(" %s %s\n", errorStyle.Render("x"), err) - failed++ - } else { - cli.Print(" %s %s\n", successStyle.Render("v"), i18n.T("cmd.dev.committed")) - succeeded++ - } - cli.Blank() - } - - // Summary - cli.Print("%s", successStyle.Render(i18n.T("cmd.dev.done_succeeded", map[string]interface{}{"Count": succeeded}))) - if failed > 0 { - cli.Print(", %s", errorStyle.Render(i18n.T("common.count.failed", map[string]interface{}{"Count": failed}))) - } - cli.Blank() - - return nil -} - -// isGitRepo checks if a directory is a git repository. -func isGitRepo(path string) bool { - gitDir := path + "/.git" - info, err := os.Stat(gitDir) - return err == nil && info.IsDir() -} - -// runCommitSingleRepo handles commit for a single repo (current directory). 
-func runCommitSingleRepo(ctx context.Context, repoPath string, all bool) error { - repoName := filepath.Base(repoPath) - - // Get status - statuses := git.Status(ctx, git.StatusOptions{ - Paths: []string{repoPath}, - Names: map[string]string{repoPath: repoName}, - }) - - if len(statuses) == 0 || statuses[0].Error != nil { - if len(statuses) > 0 && statuses[0].Error != nil { - return statuses[0].Error - } - return cli.Err("failed to get repo status") - } - - s := statuses[0] - if !s.IsDirty() { - cli.Text(i18n.T("cmd.dev.no_changes")) - return nil - } - - // Show status - cli.Print("%s: ", repoNameStyle.Render(s.Name)) - if s.Modified > 0 { - cli.Print("%s ", dirtyStyle.Render(i18n.T("cmd.dev.modified", map[string]interface{}{"Count": s.Modified}))) - } - if s.Untracked > 0 { - cli.Print("%s ", dirtyStyle.Render(i18n.T("cmd.dev.untracked", map[string]interface{}{"Count": s.Untracked}))) - } - if s.Staged > 0 { - cli.Print("%s ", aheadStyle.Render(i18n.T("cmd.dev.staged", map[string]interface{}{"Count": s.Staged}))) - } - cli.Blank() - - // Confirm unless --all - if !all { - cli.Blank() - if !cli.Confirm(i18n.T("cmd.dev.confirm_claude_commit")) { - cli.Text(i18n.T("cli.aborted")) - return nil - } - } - - cli.Blank() - - // Commit - if err := claudeCommit(ctx, repoPath, repoName, ""); err != nil { - cli.Print(" %s %s\n", errorStyle.Render("x"), err) - return err - } - cli.Print(" %s %s\n", successStyle.Render("v"), i18n.T("cmd.dev.committed")) - return nil -} \ No newline at end of file diff --git a/pkg/dev/cmd_dev.go b/pkg/dev/cmd_dev.go deleted file mode 100644 index 2cbe57d..0000000 --- a/pkg/dev/cmd_dev.go +++ /dev/null @@ -1,96 +0,0 @@ -// Package dev provides multi-repo development workflow commands. 
-// -// Git Operations: -// - work: Combined status, commit, and push workflow -// - health: Quick health check across all repos -// - commit: Claude-assisted commit message generation -// - push: Push repos with unpushed commits -// - pull: Pull repos that are behind remote -// -// GitHub Integration (requires gh CLI): -// - issues: List open issues across repos -// - reviews: List PRs needing review -// - ci: Check GitHub Actions CI status -// - impact: Analyse dependency impact of changes -// -// CI/Workflow Management: -// - workflow list: Show table of repos vs workflows -// - workflow sync: Copy workflow template to all repos -// -// API Tools: -// - api sync: Synchronize public service APIs -// -// Dev Environment (VM management): -// - install: Download dev environment image -// - boot: Start dev environment VM -// - stop: Stop dev environment VM -// - status: Check dev VM status -// - shell: Open shell in dev VM -// - serve: Mount project and start dev server -// - test: Run tests in dev environment -// - claude: Start sandboxed Claude session -// - update: Check for and apply updates -package dev - -import ( - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -func init() { - cli.RegisterCommands(AddDevCommands) -} - -// Style aliases from shared package -var ( - successStyle = cli.SuccessStyle - errorStyle = cli.ErrorStyle - warningStyle = cli.WarningStyle - dimStyle = cli.DimStyle - valueStyle = cli.ValueStyle - headerStyle = cli.HeaderStyle - repoNameStyle = cli.RepoStyle -) - -// Table styles for status display (extends shared styles with cell padding) -var ( - dirtyStyle = cli.NewStyle().Foreground(cli.ColourRed500) - aheadStyle = cli.NewStyle().Foreground(cli.ColourAmber500) - cleanStyle = cli.NewStyle().Foreground(cli.ColourGreen500) -) - -// AddDevCommands registers the 'dev' command and all subcommands. 
-func AddDevCommands(root *cli.Command) { - devCmd := &cli.Command{ - Use: "dev", - Short: i18n.T("cmd.dev.short"), - Long: i18n.T("cmd.dev.long"), - } - root.AddCommand(devCmd) - - // Git operations - addWorkCommand(devCmd) - addHealthCommand(devCmd) - addCommitCommand(devCmd) - addPushCommand(devCmd) - addPullCommand(devCmd) - - // Safe git operations for AI agents - addFileSyncCommand(devCmd) - addApplyCommand(devCmd) - - // GitHub integration - addIssuesCommand(devCmd) - addReviewsCommand(devCmd) - addCICommand(devCmd) - addImpactCommand(devCmd) - - // CI/Workflow management - addWorkflowCommands(devCmd) - - // API tools - addAPICommands(devCmd) - - // Dev environment - addVMCommands(devCmd) -} diff --git a/pkg/dev/cmd_file_sync.go b/pkg/dev/cmd_file_sync.go deleted file mode 100644 index 6dbd8a7..0000000 --- a/pkg/dev/cmd_file_sync.go +++ /dev/null @@ -1,350 +0,0 @@ -// cmd_file_sync.go implements safe file synchronization across repos for AI agents. -// -// Usage: -// core dev sync workflow.yml --to="packages/core-*" -// core dev sync .github/workflows/ --to="packages/core-*" --message="feat: add CI" -// core dev sync config.yaml --to="packages/core-*" --dry-run - -package dev - -import ( - "context" - "io" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/errors" - "github.com/host-uk/core/pkg/git" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/repos" -) - -// File sync command flags -var ( - fileSyncTo string - fileSyncMessage string - fileSyncCoAuthor string - fileSyncDryRun bool - fileSyncPush bool -) - -// addFileSyncCommand adds the 'sync' command to dev for file syncing. 
-func addFileSyncCommand(parent *cli.Command) { - syncCmd := &cli.Command{ - Use: "sync ", - Short: i18n.T("cmd.dev.file_sync.short"), - Long: i18n.T("cmd.dev.file_sync.long"), - Args: cli.MinimumNArgs(1), - RunE: func(cmd *cli.Command, args []string) error { - return runFileSync(args[0]) - }, - } - - syncCmd.Flags().StringVar(&fileSyncTo, "to", "", i18n.T("cmd.dev.file_sync.flag.to")) - syncCmd.Flags().StringVarP(&fileSyncMessage, "message", "m", "", i18n.T("cmd.dev.file_sync.flag.message")) - syncCmd.Flags().StringVar(&fileSyncCoAuthor, "co-author", "", i18n.T("cmd.dev.file_sync.flag.co_author")) - syncCmd.Flags().BoolVar(&fileSyncDryRun, "dry-run", false, i18n.T("cmd.dev.file_sync.flag.dry_run")) - syncCmd.Flags().BoolVar(&fileSyncPush, "push", false, i18n.T("cmd.dev.file_sync.flag.push")) - - _ = syncCmd.MarkFlagRequired("to") - - parent.AddCommand(syncCmd) -} - -func runFileSync(source string) error { - ctx := context.Background() - - // Security: Reject path traversal attempts - if strings.Contains(source, "..") { - return errors.E("dev.sync", "path traversal not allowed", nil) - } - - // Validate source exists - sourceInfo, err := os.Stat(source) - if err != nil { - return errors.E("dev.sync", i18n.T("cmd.dev.file_sync.error.source_not_found", map[string]interface{}{"Path": source}), err) - } - - // Find target repos - targetRepos, err := resolveTargetRepos(fileSyncTo) - if err != nil { - return err - } - - if len(targetRepos) == 0 { - return cli.Err("%s", i18n.T("cmd.dev.file_sync.error.no_targets")) - } - - // Show plan - cli.Print("%s: %s\n", dimStyle.Render(i18n.T("cmd.dev.file_sync.source")), source) - cli.Print("%s: %d repos\n", dimStyle.Render(i18n.T("cmd.dev.file_sync.targets")), len(targetRepos)) - if fileSyncDryRun { - cli.Print("%s\n", warningStyle.Render(i18n.T("cmd.dev.file_sync.dry_run_mode"))) - } - cli.Blank() - - var succeeded, skipped, failed int - - for _, repo := range targetRepos { - repoName := filepath.Base(repo.Path) - - if 
fileSyncDryRun { - cli.Print(" %s %s\n", dimStyle.Render("[dry-run]"), repoName) - succeeded++ - continue - } - - // Step 1: Pull latest (safe sync) - if err := safePull(ctx, repo.Path); err != nil { - cli.Print(" %s %s: pull failed: %s\n", errorStyle.Render("x"), repoName, err) - failed++ - continue - } - - // Step 2: Copy file(s) - destPath := filepath.Join(repo.Path, source) - if sourceInfo.IsDir() { - if err := copyDir(source, destPath); err != nil { - cli.Print(" %s %s: copy failed: %s\n", errorStyle.Render("x"), repoName, err) - failed++ - continue - } - } else { - if err := copyFile(source, destPath); err != nil { - cli.Print(" %s %s: copy failed: %s\n", errorStyle.Render("x"), repoName, err) - failed++ - continue - } - } - - // Step 3: Check if anything changed - statuses := git.Status(ctx, git.StatusOptions{ - Paths: []string{repo.Path}, - Names: map[string]string{repo.Path: repoName}, - }) - if len(statuses) == 0 || !statuses[0].IsDirty() { - cli.Print(" %s %s: %s\n", dimStyle.Render("-"), repoName, i18n.T("cmd.dev.file_sync.no_changes")) - skipped++ - continue - } - - // Step 4: Commit if message provided - if fileSyncMessage != "" { - commitMsg := fileSyncMessage - if fileSyncCoAuthor != "" { - commitMsg += "\n\nCo-Authored-By: " + fileSyncCoAuthor - } - - if err := gitAddCommit(ctx, repo.Path, source, commitMsg); err != nil { - cli.Print(" %s %s: commit failed: %s\n", errorStyle.Render("x"), repoName, err) - failed++ - continue - } - - // Step 5: Push if requested - if fileSyncPush { - if err := safePush(ctx, repo.Path); err != nil { - cli.Print(" %s %s: push failed: %s\n", errorStyle.Render("x"), repoName, err) - failed++ - continue - } - } - } - - cli.Print(" %s %s\n", successStyle.Render("v"), repoName) - succeeded++ - } - - // Summary - cli.Blank() - cli.Print("%s: ", i18n.T("cmd.dev.file_sync.summary")) - if succeeded > 0 { - cli.Print("%s", successStyle.Render(i18n.T("common.count.succeeded", map[string]interface{}{"Count": succeeded}))) - } - if 
skipped > 0 { - if succeeded > 0 { - cli.Print(", ") - } - cli.Print("%s", dimStyle.Render(i18n.T("common.count.skipped", map[string]interface{}{"Count": skipped}))) - } - if failed > 0 { - if succeeded > 0 || skipped > 0 { - cli.Print(", ") - } - cli.Print("%s", errorStyle.Render(i18n.T("common.count.failed", map[string]interface{}{"Count": failed}))) - } - cli.Blank() - - return nil -} - -// resolveTargetRepos resolves the --to pattern to actual repos -func resolveTargetRepos(pattern string) ([]*repos.Repo, error) { - // Load registry - registryPath, err := repos.FindRegistry() - if err != nil { - return nil, errors.E("dev.sync", "failed to find registry", err) - } - - registry, err := repos.LoadRegistry(registryPath) - if err != nil { - return nil, errors.E("dev.sync", "failed to load registry", err) - } - - // Match pattern against repo names - var matched []*repos.Repo - for _, repo := range registry.Repos { - if matchGlob(repo.Name, pattern) || matchGlob(repo.Path, pattern) { - matched = append(matched, repo) - } - } - - return matched, nil -} - -// matchGlob performs simple glob matching with * wildcards -func matchGlob(s, pattern string) bool { - // Handle exact match - if s == pattern { - return true - } - - // Handle * at end - if strings.HasSuffix(pattern, "*") { - prefix := strings.TrimSuffix(pattern, "*") - return strings.HasPrefix(s, prefix) - } - - // Handle * at start - if strings.HasPrefix(pattern, "*") { - suffix := strings.TrimPrefix(pattern, "*") - return strings.HasSuffix(s, suffix) - } - - // Handle * in middle - if strings.Contains(pattern, "*") { - parts := strings.SplitN(pattern, "*", 2) - return strings.HasPrefix(s, parts[0]) && strings.HasSuffix(s, parts[1]) - } - - return false -} - -// safePull pulls with rebase, handling errors gracefully -func safePull(ctx context.Context, path string) error { - // Check if we have upstream - _, err := gitCommandQuiet(ctx, path, "rev-parse", "--abbrev-ref", "@{u}") - if err != nil { - // No upstream 
set, skip pull - return nil - } - - return git.Pull(ctx, path) -} - -// safePush pushes with automatic pull-rebase on rejection -func safePush(ctx context.Context, path string) error { - err := git.Push(ctx, path) - if err == nil { - return nil - } - - // If non-fast-forward, try pull and push again - if git.IsNonFastForward(err) { - if pullErr := git.Pull(ctx, path); pullErr != nil { - return pullErr - } - return git.Push(ctx, path) - } - - return err -} - -// gitAddCommit stages and commits a file/directory -func gitAddCommit(ctx context.Context, repoPath, filePath, message string) error { - // Stage the file(s) - if _, err := gitCommandQuiet(ctx, repoPath, "add", filePath); err != nil { - return err - } - - // Commit - _, err := gitCommandQuiet(ctx, repoPath, "commit", "-m", message) - return err -} - -// gitCommandQuiet runs a git command without output -func gitCommandQuiet(ctx context.Context, dir string, args ...string) (string, error) { - cmd := exec.CommandContext(ctx, "git", args...) 
- cmd.Dir = dir - - output, err := cmd.CombinedOutput() - if err != nil { - return "", cli.Err("%s", strings.TrimSpace(string(output))) - } - return string(output), nil -} - -// copyFile copies a single file -func copyFile(src, dst string) error { - // Ensure parent directory exists - if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil { - return err - } - - srcFile, err := os.Open(src) - if err != nil { - return err - } - defer srcFile.Close() - - srcInfo, err := srcFile.Stat() - if err != nil { - return err - } - - dstFile, err := os.OpenFile(dst, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, srcInfo.Mode()) - if err != nil { - return err - } - defer dstFile.Close() - - _, err = io.Copy(dstFile, srcFile) - return err -} - -// copyDir recursively copies a directory -func copyDir(src, dst string) error { - srcInfo, err := os.Stat(src) - if err != nil { - return err - } - - if err := os.MkdirAll(dst, srcInfo.Mode()); err != nil { - return err - } - - entries, err := os.ReadDir(src) - if err != nil { - return err - } - - for _, entry := range entries { - srcPath := filepath.Join(src, entry.Name()) - dstPath := filepath.Join(dst, entry.Name()) - - if entry.IsDir() { - if err := copyDir(srcPath, dstPath); err != nil { - return err - } - } else { - if err := copyFile(srcPath, dstPath); err != nil { - return err - } - } - } - - return nil -} diff --git a/pkg/dev/cmd_health.go b/pkg/dev/cmd_health.go deleted file mode 100644 index f1ed360..0000000 --- a/pkg/dev/cmd_health.go +++ /dev/null @@ -1,185 +0,0 @@ -package dev - -import ( - "context" - "fmt" - "sort" - "strings" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/git" - "github.com/host-uk/core/pkg/i18n" -) - -// Health command flags -var ( - healthRegistryPath string - healthVerbose bool -) - -// addHealthCommand adds the 'health' command to the given parent command. 
-func addHealthCommand(parent *cli.Command) { - healthCmd := &cli.Command{ - Use: "health", - Short: i18n.T("cmd.dev.health.short"), - Long: i18n.T("cmd.dev.health.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runHealth(healthRegistryPath, healthVerbose) - }, - } - - healthCmd.Flags().StringVar(&healthRegistryPath, "registry", "", i18n.T("common.flag.registry")) - healthCmd.Flags().BoolVarP(&healthVerbose, "verbose", "v", false, i18n.T("cmd.dev.health.flag.verbose")) - - parent.AddCommand(healthCmd) -} - -func runHealth(registryPath string, verbose bool) error { - ctx := context.Background() - - // Load registry and get paths - reg, _, err := loadRegistryWithConfig(registryPath) - if err != nil { - return err - } - - // Build paths and names for git operations - var paths []string - names := make(map[string]string) - - for _, repo := range reg.List() { - if repo.IsGitRepo() { - paths = append(paths, repo.Path) - names[repo.Path] = repo.Name - } - } - - if len(paths) == 0 { - cli.Text(i18n.T("cmd.dev.no_git_repos")) - return nil - } - - // Get status for all repos - statuses := git.Status(ctx, git.StatusOptions{ - Paths: paths, - Names: names, - }) - - // Sort for consistent verbose output - sort.Slice(statuses, func(i, j int) bool { - return statuses[i].Name < statuses[j].Name - }) - - // Aggregate stats - var ( - totalRepos = len(statuses) - dirtyRepos []string - aheadRepos []string - behindRepos []string - errorRepos []string - ) - - for _, s := range statuses { - if s.Error != nil { - errorRepos = append(errorRepos, s.Name) - continue - } - if s.IsDirty() { - dirtyRepos = append(dirtyRepos, s.Name) - } - if s.HasUnpushed() { - aheadRepos = append(aheadRepos, s.Name) - } - if s.HasUnpulled() { - behindRepos = append(behindRepos, s.Name) - } - } - - // Print summary line - cli.Blank() - printHealthSummary(totalRepos, dirtyRepos, aheadRepos, behindRepos, errorRepos) - cli.Blank() - - // Verbose output - if verbose { - if len(dirtyRepos) > 0 
{ - cli.Print("%s %s\n", warningStyle.Render(i18n.T("cmd.dev.health.dirty_label")), formatRepoList(dirtyRepos)) - } - if len(aheadRepos) > 0 { - cli.Print("%s %s\n", successStyle.Render(i18n.T("cmd.dev.health.ahead_label")), formatRepoList(aheadRepos)) - } - if len(behindRepos) > 0 { - cli.Print("%s %s\n", warningStyle.Render(i18n.T("cmd.dev.health.behind_label")), formatRepoList(behindRepos)) - } - if len(errorRepos) > 0 { - cli.Print("%s %s\n", errorStyle.Render(i18n.T("cmd.dev.health.errors_label")), formatRepoList(errorRepos)) - } - cli.Blank() - } - - return nil -} - -func printHealthSummary(total int, dirty, ahead, behind, errors []string) { - parts := []string{ - statusPart(total, i18n.T("cmd.dev.health.repos"), cli.ValueStyle), - } - - // Dirty status - if len(dirty) > 0 { - parts = append(parts, statusPart(len(dirty), i18n.T("common.status.dirty"), cli.WarningStyle)) - } else { - parts = append(parts, statusText(i18n.T("cmd.dev.status.clean"), cli.SuccessStyle)) - } - - // Push status - if len(ahead) > 0 { - parts = append(parts, statusPart(len(ahead), i18n.T("cmd.dev.health.to_push"), cli.ValueStyle)) - } else { - parts = append(parts, statusText(i18n.T("common.status.synced"), cli.SuccessStyle)) - } - - // Pull status - if len(behind) > 0 { - parts = append(parts, statusPart(len(behind), i18n.T("cmd.dev.health.to_pull"), cli.WarningStyle)) - } else { - parts = append(parts, statusText(i18n.T("common.status.up_to_date"), cli.SuccessStyle)) - } - - // Errors (only if any) - if len(errors) > 0 { - parts = append(parts, statusPart(len(errors), i18n.T("cmd.dev.health.errors"), cli.ErrorStyle)) - } - - cli.Text(statusLine(parts...)) -} - -func formatRepoList(reposList []string) string { - if len(reposList) <= 5 { - return joinRepos(reposList) - } - return joinRepos(reposList[:5]) + " " + i18n.T("cmd.dev.health.more", map[string]interface{}{"Count": len(reposList) - 5}) -} - -func joinRepos(reposList []string) string { - result := "" - for i, r := range 
reposList { - if i > 0 { - result += ", " - } - result += r - } - return result -} - -func statusPart(count int, label string, style *cli.AnsiStyle) string { - return style.Render(fmt.Sprintf("%d %s", count, label)) -} - -func statusText(text string, style *cli.AnsiStyle) string { - return style.Render(text) -} - -func statusLine(parts ...string) string { - return strings.Join(parts, " | ") -} diff --git a/pkg/dev/cmd_impact.go b/pkg/dev/cmd_impact.go deleted file mode 100644 index 22a499d..0000000 --- a/pkg/dev/cmd_impact.go +++ /dev/null @@ -1,183 +0,0 @@ -package dev - -import ( - "errors" - "sort" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/repos" -) - -// Impact-specific styles (aliases to shared) -var ( - impactDirectStyle = cli.ErrorStyle - impactIndirectStyle = cli.WarningStyle - impactSafeStyle = cli.SuccessStyle -) - -// Impact command flags -var impactRegistryPath string - -// addImpactCommand adds the 'impact' command to the given parent command. 
-func addImpactCommand(parent *cli.Command) { - impactCmd := &cli.Command{ - Use: "impact ", - Short: i18n.T("cmd.dev.impact.short"), - Long: i18n.T("cmd.dev.impact.long"), - Args: cli.ExactArgs(1), - RunE: func(cmd *cli.Command, args []string) error { - return runImpact(impactRegistryPath, args[0]) - }, - } - - impactCmd.Flags().StringVar(&impactRegistryPath, "registry", "", i18n.T("common.flag.registry")) - - parent.AddCommand(impactCmd) -} - -func runImpact(registryPath string, repoName string) error { - // Find or use provided registry - var reg *repos.Registry - var err error - - if registryPath != "" { - reg, err = repos.LoadRegistry(registryPath) - if err != nil { - return cli.Wrap(err, "failed to load registry") - } - } else { - registryPath, err = repos.FindRegistry() - if err == nil { - reg, err = repos.LoadRegistry(registryPath) - if err != nil { - return cli.Wrap(err, "failed to load registry") - } - } else { - return errors.New(i18n.T("cmd.dev.impact.requires_registry")) - } - } - - // Check repo exists - repo, exists := reg.Get(repoName) - if !exists { - return errors.New(i18n.T("error.repo_not_found", map[string]interface{}{"Name": repoName})) - } - - // Build reverse dependency graph - dependents := buildDependentsGraph(reg) - - // Find all affected repos (direct and transitive) - direct := dependents[repoName] - allAffected := findAllDependents(repoName, dependents) - - // Separate direct vs indirect - directSet := make(map[string]bool) - for _, d := range direct { - directSet[d] = true - } - - var indirect []string - for _, a := range allAffected { - if !directSet[a] { - indirect = append(indirect, a) - } - } - - // Sort for consistent output - sort.Strings(direct) - sort.Strings(indirect) - - // Print results - cli.Blank() - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.dev.impact.analysis_for")), repoNameStyle.Render(repoName)) - if repo.Description != "" { - cli.Print("%s\n", dimStyle.Render(repo.Description)) - } - cli.Blank() - - if 
len(allAffected) == 0 { - cli.Print("%s %s\n", impactSafeStyle.Render("v"), i18n.T("cmd.dev.impact.no_dependents", map[string]interface{}{"Name": repoName})) - return nil - } - - // Direct dependents - if len(direct) > 0 { - cli.Print("%s %s\n", - impactDirectStyle.Render("*"), - i18n.T("cmd.dev.impact.direct_dependents", map[string]interface{}{"Count": len(direct)}), - ) - for _, d := range direct { - r, _ := reg.Get(d) - desc := "" - if r != nil && r.Description != "" { - desc = dimStyle.Render(" - " + cli.Truncate(r.Description, 40)) - } - cli.Print(" %s%s\n", d, desc) - } - cli.Blank() - } - - // Indirect dependents - if len(indirect) > 0 { - cli.Print("%s %s\n", - impactIndirectStyle.Render("o"), - i18n.T("cmd.dev.impact.transitive_dependents", map[string]interface{}{"Count": len(indirect)}), - ) - for _, d := range indirect { - r, _ := reg.Get(d) - desc := "" - if r != nil && r.Description != "" { - desc = dimStyle.Render(" - " + cli.Truncate(r.Description, 40)) - } - cli.Print(" %s%s\n", d, desc) - } - cli.Blank() - } - - // Summary - cli.Print("%s %s\n", - dimStyle.Render(i18n.Label("summary")), - i18n.T("cmd.dev.impact.changes_affect", map[string]interface{}{ - "Repo": repoNameStyle.Render(repoName), - "Affected": len(allAffected), - "Total": len(reg.Repos) - 1, - }), - ) - - return nil -} - -// buildDependentsGraph creates a reverse dependency map -// key = repo, value = repos that depend on it -func buildDependentsGraph(reg *repos.Registry) map[string][]string { - dependents := make(map[string][]string) - - for name, repo := range reg.Repos { - for _, dep := range repo.DependsOn { - dependents[dep] = append(dependents[dep], name) - } - } - - return dependents -} - -// findAllDependents recursively finds all repos that depend on the given repo -func findAllDependents(repoName string, dependents map[string][]string) []string { - visited := make(map[string]bool) - var result []string - - var visit func(name string) - visit = func(name string) { - for _, dep 
:= range dependents[name] { - if !visited[dep] { - visited[dep] = true - result = append(result, dep) - visit(dep) // Recurse for transitive deps - } - } - } - - visit(repoName) - return result -} diff --git a/pkg/dev/cmd_issues.go b/pkg/dev/cmd_issues.go deleted file mode 100644 index 834a7b5..0000000 --- a/pkg/dev/cmd_issues.go +++ /dev/null @@ -1,208 +0,0 @@ -package dev - -import ( - "encoding/json" - "errors" - "os/exec" - "sort" - "strings" - "time" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -// Issue-specific styles (aliases to shared) -var ( - issueRepoStyle = cli.DimStyle - issueNumberStyle = cli.TitleStyle - issueTitleStyle = cli.ValueStyle - issueLabelStyle = cli.WarningStyle - issueAssigneeStyle = cli.SuccessStyle - issueAgeStyle = cli.DimStyle -) - -// GitHubIssue represents a GitHub issue from the API. -type GitHubIssue struct { - Number int `json:"number"` - Title string `json:"title"` - State string `json:"state"` - CreatedAt time.Time `json:"createdAt"` - Author struct { - Login string `json:"login"` - } `json:"author"` - Assignees struct { - Nodes []struct { - Login string `json:"login"` - } `json:"nodes"` - } `json:"assignees"` - Labels struct { - Nodes []struct { - Name string `json:"name"` - } `json:"nodes"` - } `json:"labels"` - URL string `json:"url"` - - // Added by us - RepoName string `json:"-"` -} - -// Issues command flags -var ( - issuesRegistryPath string - issuesLimit int - issuesAssignee string -) - -// addIssuesCommand adds the 'issues' command to the given parent command. 
-func addIssuesCommand(parent *cli.Command) { - issuesCmd := &cli.Command{ - Use: "issues", - Short: i18n.T("cmd.dev.issues.short"), - Long: i18n.T("cmd.dev.issues.long"), - RunE: func(cmd *cli.Command, args []string) error { - limit := issuesLimit - if limit == 0 { - limit = 10 - } - return runIssues(issuesRegistryPath, limit, issuesAssignee) - }, - } - - issuesCmd.Flags().StringVar(&issuesRegistryPath, "registry", "", i18n.T("common.flag.registry")) - issuesCmd.Flags().IntVarP(&issuesLimit, "limit", "l", 10, i18n.T("cmd.dev.issues.flag.limit")) - issuesCmd.Flags().StringVarP(&issuesAssignee, "assignee", "a", "", i18n.T("cmd.dev.issues.flag.assignee")) - - parent.AddCommand(issuesCmd) -} - -func runIssues(registryPath string, limit int, assignee string) error { - // Check gh is available - if _, err := exec.LookPath("gh"); err != nil { - return errors.New(i18n.T("error.gh_not_found")) - } - - // Find or use provided registry - reg, _, err := loadRegistryWithConfig(registryPath) - if err != nil { - return err - } - - // Fetch issues sequentially (avoid GitHub rate limits) - var allIssues []GitHubIssue - var fetchErrors []error - - repoList := reg.List() - for i, repo := range repoList { - repoFullName := cli.Sprintf("%s/%s", reg.Org, repo.Name) - cli.Print("\033[2K\r%s %d/%d %s", dimStyle.Render(i18n.T("i18n.progress.fetch")), i+1, len(repoList), repo.Name) - - issues, err := fetchIssues(repoFullName, repo.Name, limit, assignee) - if err != nil { - fetchErrors = append(fetchErrors, cli.Wrap(err, repo.Name)) - continue - } - allIssues = append(allIssues, issues...) 
- } - cli.Print("\033[2K\r") // Clear progress line - - // Sort by created date (newest first) - sort.Slice(allIssues, func(i, j int) bool { - return allIssues[i].CreatedAt.After(allIssues[j].CreatedAt) - }) - - // Print issues - if len(allIssues) == 0 { - cli.Text(i18n.T("cmd.dev.issues.no_issues")) - return nil - } - - cli.Print("\n%s\n\n", i18n.T("cmd.dev.issues.open_issues", map[string]interface{}{"Count": len(allIssues)})) - - for _, issue := range allIssues { - printIssue(issue) - } - - // Print any errors - if len(fetchErrors) > 0 { - cli.Blank() - for _, err := range fetchErrors { - cli.Print("%s %s\n", errorStyle.Render(i18n.Label("error")), err) - } - } - - return nil -} - -func fetchIssues(repoFullName, repoName string, limit int, assignee string) ([]GitHubIssue, error) { - args := []string{ - "issue", "list", - "--repo", repoFullName, - "--state", "open", - "--limit", cli.Sprintf("%d", limit), - "--json", "number,title,state,createdAt,author,assignees,labels,url", - } - - if assignee != "" { - args = append(args, "--assignee", assignee) - } - - cmd := exec.Command("gh", args...) 
- output, err := cmd.Output() - if err != nil { - // Check if it's just "no issues" vs actual error - if exitErr, ok := err.(*exec.ExitError); ok { - stderr := string(exitErr.Stderr) - if strings.Contains(stderr, "no issues") || strings.Contains(stderr, "Could not resolve") { - return nil, nil - } - return nil, cli.Err("%s", stderr) - } - return nil, err - } - - var issues []GitHubIssue - if err := json.Unmarshal(output, &issues); err != nil { - return nil, err - } - - // Tag with repo name - for i := range issues { - issues[i].RepoName = repoName - } - - return issues, nil -} - -func printIssue(issue GitHubIssue) { - // #42 [core-bio] Fix avatar upload - num := issueNumberStyle.Render(cli.Sprintf("#%d", issue.Number)) - repo := issueRepoStyle.Render(cli.Sprintf("[%s]", issue.RepoName)) - title := issueTitleStyle.Render(cli.Truncate(issue.Title, 60)) - - line := cli.Sprintf(" %s %s %s", num, repo, title) - - // Add labels if any - if len(issue.Labels.Nodes) > 0 { - var labels []string - for _, l := range issue.Labels.Nodes { - labels = append(labels, l.Name) - } - line += " " + issueLabelStyle.Render("["+strings.Join(labels, ", ")+"]") - } - - // Add assignee if any - if len(issue.Assignees.Nodes) > 0 { - var assignees []string - for _, a := range issue.Assignees.Nodes { - assignees = append(assignees, "@"+a.Login) - } - line += " " + issueAssigneeStyle.Render(strings.Join(assignees, ", ")) - } - - // Add age - age := cli.FormatAge(issue.CreatedAt) - line += " " + issueAgeStyle.Render(age) - - cli.Text(line) -} diff --git a/pkg/dev/cmd_pull.go b/pkg/dev/cmd_pull.go deleted file mode 100644 index 1b29b7f..0000000 --- a/pkg/dev/cmd_pull.go +++ /dev/null @@ -1,130 +0,0 @@ -package dev - -import ( - "context" - "os/exec" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/git" - "github.com/host-uk/core/pkg/i18n" -) - -// Pull command flags -var ( - pullRegistryPath string - pullAll bool -) - -// addPullCommand adds the 'pull' command to the given 
parent command. -func addPullCommand(parent *cli.Command) { - pullCmd := &cli.Command{ - Use: "pull", - Short: i18n.T("cmd.dev.pull.short"), - Long: i18n.T("cmd.dev.pull.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runPull(pullRegistryPath, pullAll) - }, - } - - pullCmd.Flags().StringVar(&pullRegistryPath, "registry", "", i18n.T("common.flag.registry")) - pullCmd.Flags().BoolVar(&pullAll, "all", false, i18n.T("cmd.dev.pull.flag.all")) - - parent.AddCommand(pullCmd) -} - -func runPull(registryPath string, all bool) error { - ctx := context.Background() - - // Find or use provided registry - reg, _, err := loadRegistryWithConfig(registryPath) - if err != nil { - return err - } - - // Build paths and names for git operations - var paths []string - names := make(map[string]string) - - for _, repo := range reg.List() { - if repo.IsGitRepo() { - paths = append(paths, repo.Path) - names[repo.Path] = repo.Name - } - } - - if len(paths) == 0 { - cli.Text(i18n.T("cmd.dev.no_git_repos")) - return nil - } - - // Get status for all repos - statuses := git.Status(ctx, git.StatusOptions{ - Paths: paths, - Names: names, - }) - - // Find repos to pull - var toPull []git.RepoStatus - for _, s := range statuses { - if s.Error != nil { - continue - } - if all || s.HasUnpulled() { - toPull = append(toPull, s) - } - } - - if len(toPull) == 0 { - cli.Text(i18n.T("cmd.dev.pull.all_up_to_date")) - return nil - } - - // Show what we're pulling - if all { - cli.Print("\n%s\n\n", i18n.T("cmd.dev.pull.pulling_repos", map[string]interface{}{"Count": len(toPull)})) - } else { - cli.Print("\n%s\n\n", i18n.T("cmd.dev.pull.repos_behind", map[string]interface{}{"Count": len(toPull)})) - for _, s := range toPull { - cli.Print(" %s: %s\n", - repoNameStyle.Render(s.Name), - dimStyle.Render(i18n.T("cmd.dev.pull.commits_behind", map[string]interface{}{"Count": s.Behind})), - ) - } - cli.Blank() - } - - // Pull each repo - var succeeded, failed int - for _, s := range toPull { - 
cli.Print(" %s %s... ", dimStyle.Render(i18n.T("cmd.dev.pull.pulling")), s.Name) - - err := gitPull(ctx, s.Path) - if err != nil { - cli.Print("%s\n", errorStyle.Render("x "+err.Error())) - failed++ - } else { - cli.Print("%s\n", successStyle.Render("v")) - succeeded++ - } - } - - // Summary - cli.Blank() - cli.Print("%s", successStyle.Render(i18n.T("cmd.dev.pull.done_pulled", map[string]interface{}{"Count": succeeded}))) - if failed > 0 { - cli.Print(", %s", errorStyle.Render(i18n.T("common.count.failed", map[string]interface{}{"Count": failed}))) - } - cli.Blank() - - return nil -} - -func gitPull(ctx context.Context, path string) error { - cmd := exec.CommandContext(ctx, "git", "pull", "--ff-only") - cmd.Dir = path - output, err := cmd.CombinedOutput() - if err != nil { - return cli.Err("%s", string(output)) - } - return nil -} diff --git a/pkg/dev/cmd_push.go b/pkg/dev/cmd_push.go deleted file mode 100644 index 173ed38..0000000 --- a/pkg/dev/cmd_push.go +++ /dev/null @@ -1,275 +0,0 @@ -package dev - -import ( - "context" - "os" - "path/filepath" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/git" - "github.com/host-uk/core/pkg/i18n" -) - -// Push command flags -var ( - pushRegistryPath string - pushForce bool -) - -// addPushCommand adds the 'push' command to the given parent command. 
-func addPushCommand(parent *cli.Command) { - pushCmd := &cli.Command{ - Use: "push", - Short: i18n.T("cmd.dev.push.short"), - Long: i18n.T("cmd.dev.push.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runPush(pushRegistryPath, pushForce) - }, - } - - pushCmd.Flags().StringVar(&pushRegistryPath, "registry", "", i18n.T("common.flag.registry")) - pushCmd.Flags().BoolVarP(&pushForce, "force", "f", false, i18n.T("cmd.dev.push.flag.force")) - - parent.AddCommand(pushCmd) -} - -func runPush(registryPath string, force bool) error { - ctx := context.Background() - cwd, _ := os.Getwd() - - // Check if current directory is a git repo (single-repo mode) - if registryPath == "" && isGitRepo(cwd) { - return runPushSingleRepo(ctx, cwd, force) - } - - // Multi-repo mode: find or use provided registry - reg, _, err := loadRegistryWithConfig(registryPath) - if err != nil { - return err - } - - // Build paths and names for git operations - var paths []string - names := make(map[string]string) - - for _, repo := range reg.List() { - if repo.IsGitRepo() { - paths = append(paths, repo.Path) - names[repo.Path] = repo.Name - } - } - - if len(paths) == 0 { - cli.Text(i18n.T("cmd.dev.no_git_repos")) - return nil - } - - // Get status for all repos - statuses := git.Status(ctx, git.StatusOptions{ - Paths: paths, - Names: names, - }) - - // Find repos with unpushed commits - var aheadRepos []git.RepoStatus - for _, s := range statuses { - if s.Error == nil && s.HasUnpushed() { - aheadRepos = append(aheadRepos, s) - } - } - - if len(aheadRepos) == 0 { - cli.Text(i18n.T("cmd.dev.push.all_up_to_date")) - return nil - } - - // Show repos to push - cli.Print("\n%s\n\n", i18n.T("common.count.repos_unpushed", map[string]interface{}{"Count": len(aheadRepos)})) - totalCommits := 0 - for _, s := range aheadRepos { - cli.Print(" %s: %s\n", - repoNameStyle.Render(s.Name), - aheadStyle.Render(i18n.T("common.count.commits", map[string]interface{}{"Count": s.Ahead})), - ) - 
totalCommits += s.Ahead - } - - // Confirm unless --force - if !force { - cli.Blank() - if !cli.Confirm(i18n.T("cmd.dev.push.confirm_push", map[string]interface{}{"Commits": totalCommits, "Repos": len(aheadRepos)})) { - cli.Text(i18n.T("cli.aborted")) - return nil - } - } - - cli.Blank() - - // Push sequentially (SSH passphrase needs interaction) - var pushPaths []string - for _, s := range aheadRepos { - pushPaths = append(pushPaths, s.Path) - } - - results := git.PushMultiple(ctx, pushPaths, names) - - var succeeded, failed int - var divergedRepos []git.PushResult - - for _, r := range results { - if r.Success { - cli.Print(" %s %s\n", successStyle.Render("v"), r.Name) - succeeded++ - } else { - // Check if this is a non-fast-forward error (diverged branch) - if git.IsNonFastForward(r.Error) { - cli.Print(" %s %s: %s\n", warningStyle.Render("!"), r.Name, i18n.T("cmd.dev.push.diverged")) - divergedRepos = append(divergedRepos, r) - } else { - cli.Print(" %s %s: %s\n", errorStyle.Render("x"), r.Name, r.Error) - } - failed++ - } - } - - // Handle diverged repos - offer to pull and retry - if len(divergedRepos) > 0 { - cli.Blank() - cli.Print("%s\n", i18n.T("cmd.dev.push.diverged_help")) - if cli.Confirm(i18n.T("cmd.dev.push.pull_and_retry")) { - cli.Blank() - for _, r := range divergedRepos { - cli.Print(" %s %s...\n", dimStyle.Render("↓"), r.Name) - if err := git.Pull(ctx, r.Path); err != nil { - cli.Print(" %s %s: %s\n", errorStyle.Render("x"), r.Name, err) - continue - } - cli.Print(" %s %s...\n", dimStyle.Render("↑"), r.Name) - if err := git.Push(ctx, r.Path); err != nil { - cli.Print(" %s %s: %s\n", errorStyle.Render("x"), r.Name, err) - continue - } - cli.Print(" %s %s\n", successStyle.Render("v"), r.Name) - succeeded++ - failed-- - } - } - } - - // Summary - cli.Blank() - cli.Print("%s", successStyle.Render(i18n.T("cmd.dev.push.done_pushed", map[string]interface{}{"Count": succeeded}))) - if failed > 0 { - cli.Print(", %s", 
errorStyle.Render(i18n.T("common.count.failed", map[string]interface{}{"Count": failed}))) - } - cli.Blank() - - return nil -} - -// runPushSingleRepo handles push for a single repo (current directory). -func runPushSingleRepo(ctx context.Context, repoPath string, force bool) error { - repoName := filepath.Base(repoPath) - - // Get status - statuses := git.Status(ctx, git.StatusOptions{ - Paths: []string{repoPath}, - Names: map[string]string{repoPath: repoName}, - }) - - if len(statuses) == 0 { - return cli.Err("failed to get repo status") - } - - s := statuses[0] - if s.Error != nil { - return s.Error - } - - if !s.HasUnpushed() { - // Check if there are uncommitted changes - if s.IsDirty() { - cli.Print("%s: ", repoNameStyle.Render(s.Name)) - if s.Modified > 0 { - cli.Print("%s ", dirtyStyle.Render(i18n.T("cmd.dev.modified", map[string]interface{}{"Count": s.Modified}))) - } - if s.Untracked > 0 { - cli.Print("%s ", dirtyStyle.Render(i18n.T("cmd.dev.untracked", map[string]interface{}{"Count": s.Untracked}))) - } - if s.Staged > 0 { - cli.Print("%s ", aheadStyle.Render(i18n.T("cmd.dev.staged", map[string]interface{}{"Count": s.Staged}))) - } - cli.Blank() - cli.Blank() - if cli.Confirm(i18n.T("cmd.dev.push.uncommitted_changes_commit")) { - cli.Blank() - // Use edit-enabled commit if only untracked files (may need .gitignore fix) - var err error - if s.Modified == 0 && s.Staged == 0 && s.Untracked > 0 { - err = claudeEditCommit(ctx, repoPath, repoName, "") - } else { - err = runCommitSingleRepo(ctx, repoPath, false) - } - if err != nil { - return err - } - // Re-check - only push if Claude created commits - newStatuses := git.Status(ctx, git.StatusOptions{ - Paths: []string{repoPath}, - Names: map[string]string{repoPath: repoName}, - }) - if len(newStatuses) > 0 && newStatuses[0].HasUnpushed() { - return runPushSingleRepo(ctx, repoPath, force) - } - } - return nil - } - cli.Text(i18n.T("cmd.dev.push.all_up_to_date")) - return nil - } - - // Show commits to push - 
cli.Print("%s: %s\n", repoNameStyle.Render(s.Name), - aheadStyle.Render(i18n.T("common.count.commits", map[string]interface{}{"Count": s.Ahead}))) - - // Confirm unless --force - if !force { - cli.Blank() - if !cli.Confirm(i18n.T("cmd.dev.push.confirm_push", map[string]interface{}{"Commits": s.Ahead, "Repos": 1})) { - cli.Text(i18n.T("cli.aborted")) - return nil - } - } - - cli.Blank() - - // Push - err := git.Push(ctx, repoPath) - if err != nil { - if git.IsNonFastForward(err) { - cli.Print(" %s %s: %s\n", warningStyle.Render("!"), repoName, i18n.T("cmd.dev.push.diverged")) - cli.Blank() - cli.Print("%s\n", i18n.T("cmd.dev.push.diverged_help")) - if cli.Confirm(i18n.T("cmd.dev.push.pull_and_retry")) { - cli.Blank() - cli.Print(" %s %s...\n", dimStyle.Render("↓"), repoName) - if pullErr := git.Pull(ctx, repoPath); pullErr != nil { - cli.Print(" %s %s: %s\n", errorStyle.Render("x"), repoName, pullErr) - return pullErr - } - cli.Print(" %s %s...\n", dimStyle.Render("↑"), repoName) - if pushErr := git.Push(ctx, repoPath); pushErr != nil { - cli.Print(" %s %s: %s\n", errorStyle.Render("x"), repoName, pushErr) - return pushErr - } - cli.Print(" %s %s\n", successStyle.Render("v"), repoName) - return nil - } - } - cli.Print(" %s %s: %s\n", errorStyle.Render("x"), repoName, err) - return err - } - - cli.Print(" %s %s\n", successStyle.Render("v"), repoName) - return nil -} diff --git a/pkg/dev/cmd_reviews.go b/pkg/dev/cmd_reviews.go deleted file mode 100644 index 3289c9a..0000000 --- a/pkg/dev/cmd_reviews.go +++ /dev/null @@ -1,237 +0,0 @@ -package dev - -import ( - "encoding/json" - "errors" - "os/exec" - "sort" - "strings" - "time" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -// PR-specific styles (aliases to shared) -var ( - prNumberStyle = cli.NumberStyle - prTitleStyle = cli.ValueStyle - prAuthorStyle = cli.InfoStyle - prApprovedStyle = cli.SuccessStyle - prChangesStyle = cli.WarningStyle - prPendingStyle = cli.DimStyle - 
prDraftStyle = cli.DimStyle -) - -// GitHubPR represents a GitHub pull request. -type GitHubPR struct { - Number int `json:"number"` - Title string `json:"title"` - State string `json:"state"` - IsDraft bool `json:"isDraft"` - CreatedAt time.Time `json:"createdAt"` - Author struct { - Login string `json:"login"` - } `json:"author"` - ReviewDecision string `json:"reviewDecision"` - Reviews struct { - Nodes []struct { - State string `json:"state"` - Author struct { - Login string `json:"login"` - } `json:"author"` - } `json:"nodes"` - } `json:"reviews"` - URL string `json:"url"` - - // Added by us - RepoName string `json:"-"` -} - -// Reviews command flags -var ( - reviewsRegistryPath string - reviewsAuthor string - reviewsShowAll bool -) - -// addReviewsCommand adds the 'reviews' command to the given parent command. -func addReviewsCommand(parent *cli.Command) { - reviewsCmd := &cli.Command{ - Use: "reviews", - Short: i18n.T("cmd.dev.reviews.short"), - Long: i18n.T("cmd.dev.reviews.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runReviews(reviewsRegistryPath, reviewsAuthor, reviewsShowAll) - }, - } - - reviewsCmd.Flags().StringVar(&reviewsRegistryPath, "registry", "", i18n.T("common.flag.registry")) - reviewsCmd.Flags().StringVar(&reviewsAuthor, "author", "", i18n.T("cmd.dev.reviews.flag.author")) - reviewsCmd.Flags().BoolVar(&reviewsShowAll, "all", false, i18n.T("cmd.dev.reviews.flag.all")) - - parent.AddCommand(reviewsCmd) -} - -func runReviews(registryPath string, author string, showAll bool) error { - // Check gh is available - if _, err := exec.LookPath("gh"); err != nil { - return errors.New(i18n.T("error.gh_not_found")) - } - - // Find or use provided registry - reg, _, err := loadRegistryWithConfig(registryPath) - if err != nil { - return err - } - - // Fetch PRs sequentially (avoid GitHub rate limits) - var allPRs []GitHubPR - var fetchErrors []error - - repoList := reg.List() - for i, repo := range repoList { - repoFullName := 
cli.Sprintf("%s/%s", reg.Org, repo.Name) - cli.Print("\033[2K\r%s %d/%d %s", dimStyle.Render(i18n.T("i18n.progress.fetch")), i+1, len(repoList), repo.Name) - - prs, err := fetchPRs(repoFullName, repo.Name, author) - if err != nil { - fetchErrors = append(fetchErrors, cli.Wrap(err, repo.Name)) - continue - } - - for _, pr := range prs { - // Filter drafts unless --all - if !showAll && pr.IsDraft { - continue - } - allPRs = append(allPRs, pr) - } - } - cli.Print("\033[2K\r") // Clear progress line - - // Sort: pending review first, then by date - sort.Slice(allPRs, func(i, j int) bool { - // Pending reviews come first - iPending := allPRs[i].ReviewDecision == "" || allPRs[i].ReviewDecision == "REVIEW_REQUIRED" - jPending := allPRs[j].ReviewDecision == "" || allPRs[j].ReviewDecision == "REVIEW_REQUIRED" - if iPending != jPending { - return iPending - } - return allPRs[i].CreatedAt.After(allPRs[j].CreatedAt) - }) - - // Print PRs - if len(allPRs) == 0 { - cli.Text(i18n.T("cmd.dev.reviews.no_prs")) - return nil - } - - // Count by status - var pending, approved, changesRequested int - for _, pr := range allPRs { - switch pr.ReviewDecision { - case "APPROVED": - approved++ - case "CHANGES_REQUESTED": - changesRequested++ - default: - pending++ - } - } - - cli.Blank() - cli.Print("%s", i18n.T("cmd.dev.reviews.open_prs", map[string]interface{}{"Count": len(allPRs)})) - if pending > 0 { - cli.Print(" * %s", prPendingStyle.Render(i18n.T("common.count.pending", map[string]interface{}{"Count": pending}))) - } - if approved > 0 { - cli.Print(" * %s", prApprovedStyle.Render(i18n.T("cmd.dev.reviews.approved", map[string]interface{}{"Count": approved}))) - } - if changesRequested > 0 { - cli.Print(" * %s", prChangesStyle.Render(i18n.T("cmd.dev.reviews.changes_requested", map[string]interface{}{"Count": changesRequested}))) - } - cli.Blank() - cli.Blank() - - for _, pr := range allPRs { - printPR(pr) - } - - // Print any errors - if len(fetchErrors) > 0 { - cli.Blank() - for _, err 
:= range fetchErrors { - cli.Print("%s %s\n", errorStyle.Render(i18n.Label("error")), err) - } - } - - return nil -} - -func fetchPRs(repoFullName, repoName string, author string) ([]GitHubPR, error) { - args := []string{ - "pr", "list", - "--repo", repoFullName, - "--state", "open", - "--json", "number,title,state,isDraft,createdAt,author,reviewDecision,reviews,url", - } - - if author != "" { - args = append(args, "--author", author) - } - - cmd := exec.Command("gh", args...) - output, err := cmd.Output() - if err != nil { - if exitErr, ok := err.(*exec.ExitError); ok { - stderr := string(exitErr.Stderr) - if strings.Contains(stderr, "no pull requests") || strings.Contains(stderr, "Could not resolve") { - return nil, nil - } - return nil, cli.Err("%s", stderr) - } - return nil, err - } - - var prs []GitHubPR - if err := json.Unmarshal(output, &prs); err != nil { - return nil, err - } - - // Tag with repo name - for i := range prs { - prs[i].RepoName = repoName - } - - return prs, nil -} - -func printPR(pr GitHubPR) { - // #12 [core-php] Webhook validation - num := prNumberStyle.Render(cli.Sprintf("#%d", pr.Number)) - repo := issueRepoStyle.Render(cli.Sprintf("[%s]", pr.RepoName)) - title := prTitleStyle.Render(cli.Truncate(pr.Title, 50)) - author := prAuthorStyle.Render("@" + pr.Author.Login) - - // Review status - var status string - switch pr.ReviewDecision { - case "APPROVED": - status = prApprovedStyle.Render(i18n.T("cmd.dev.reviews.status_approved")) - case "CHANGES_REQUESTED": - status = prChangesStyle.Render(i18n.T("cmd.dev.reviews.status_changes")) - default: - status = prPendingStyle.Render(i18n.T("cmd.dev.reviews.status_pending")) - } - - // Draft indicator - draft := "" - if pr.IsDraft { - draft = prDraftStyle.Render(" " + i18n.T("cmd.dev.reviews.draft")) - } - - age := cli.FormatAge(pr.CreatedAt) - - cli.Print(" %s %s %s%s %s %s %s\n", num, repo, title, draft, author, status, issueAgeStyle.Render(age)) -} diff --git a/pkg/dev/cmd_sync.go 
b/pkg/dev/cmd_sync.go deleted file mode 100644 index 87a0a96..0000000 --- a/pkg/dev/cmd_sync.go +++ /dev/null @@ -1,165 +0,0 @@ -package dev - -import ( - "bytes" - "go/ast" - "go/parser" - "go/token" - "os" - "path/filepath" - "text/template" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "golang.org/x/text/cases" - "golang.org/x/text/language" -) - -// addSyncCommand adds the 'sync' command to the given parent command. -func addSyncCommand(parent *cli.Command) { - syncCmd := &cli.Command{ - Use: "sync", - Short: i18n.T("cmd.dev.sync.short"), - Long: i18n.T("cmd.dev.sync.long"), - RunE: func(cmd *cli.Command, args []string) error { - if err := runSync(); err != nil { - return cli.Wrap(err, i18n.Label("error")) - } - cli.Text(i18n.T("i18n.done.sync", "public APIs")) - return nil - }, - } - - parent.AddCommand(syncCmd) -} - -type symbolInfo struct { - Name string - Kind string // "var", "func", "type", "const" -} - -func runSync() error { - pkgDir := "pkg" - internalDirs, err := os.ReadDir(pkgDir) - if err != nil { - return cli.Wrap(err, "failed to read pkg directory") - } - - for _, dir := range internalDirs { - if !dir.IsDir() || dir.Name() == "core" { - continue - } - - serviceName := dir.Name() - internalFile := filepath.Join(pkgDir, serviceName, serviceName+".go") - publicDir := serviceName - publicFile := filepath.Join(publicDir, serviceName+".go") - - if _, err := os.Stat(internalFile); os.IsNotExist(err) { - continue - } - - symbols, err := getExportedSymbols(internalFile) - if err != nil { - return cli.Wrap(err, cli.Sprintf("error getting symbols for service '%s'", serviceName)) - } - - if err := generatePublicAPIFile(publicDir, publicFile, serviceName, symbols); err != nil { - return cli.Wrap(err, cli.Sprintf("error generating public API file for service '%s'", serviceName)) - } - } - - return nil -} - -func getExportedSymbols(path string) ([]symbolInfo, error) { - fset := token.NewFileSet() - node, err := 
parser.ParseFile(fset, path, nil, parser.ParseComments) - if err != nil { - return nil, err - } - - var symbols []symbolInfo - for name, obj := range node.Scope.Objects { - if ast.IsExported(name) { - kind := "unknown" - switch obj.Kind { - case ast.Con: - kind = "const" - case ast.Var: - kind = "var" - case ast.Fun: - kind = "func" - case ast.Typ: - kind = "type" - } - if kind != "unknown" { - symbols = append(symbols, symbolInfo{Name: name, Kind: kind}) - } - } - } - return symbols, nil -} - -const publicAPITemplate = `// package {{.ServiceName}} provides the public API for the {{.ServiceName}} service. -package {{.ServiceName}} - -import ( - // Import the internal implementation with an alias. - impl "github.com/host-uk/core/{{.ServiceName}}" - - // Import the core contracts to re-export the interface. - "github.com/host-uk/core/core" -) - -{{range .Symbols}} -{{- if eq .Kind "type"}} -// {{.Name}} is the public type for the {{.Name}} service. It is a type alias -// to the underlying implementation, making it transparent to the user. -type {{.Name}} = impl.{{.Name}} -{{else if eq .Kind "const"}} -// {{.Name}} is a public constant that points to the real constant in the implementation package. -const {{.Name}} = impl.{{.Name}} -{{else if eq .Kind "var"}} -// {{.Name}} is a public variable that points to the real variable in the implementation package. -var {{.Name}} = impl.{{.Name}} -{{else if eq .Kind "func"}} -// {{.Name}} is a public function that points to the real function in the implementation package. -var {{.Name}} = impl.{{.Name}} -{{end}} -{{end}} - -// {{.InterfaceName}} is the public interface for the {{.ServiceName}} service. 
-type {{.InterfaceName}} = core.{{.InterfaceName}} -` - -func generatePublicAPIFile(dir, path, serviceName string, symbols []symbolInfo) error { - if err := os.MkdirAll(dir, os.ModePerm); err != nil { - return err - } - - tmpl, err := template.New("publicAPI").Parse(publicAPITemplate) - if err != nil { - return err - } - - tcaser := cases.Title(language.English) - interfaceName := tcaser.String(serviceName) - - data := struct { - ServiceName string - Symbols []symbolInfo - InterfaceName string - }{ - ServiceName: serviceName, - Symbols: symbols, - InterfaceName: interfaceName, - } - - var buf bytes.Buffer - if err := tmpl.Execute(&buf, data); err != nil { - return err - } - - return os.WriteFile(path, buf.Bytes(), 0644) -} diff --git a/pkg/dev/cmd_vm.go b/pkg/dev/cmd_vm.go deleted file mode 100644 index 71a4ac2..0000000 --- a/pkg/dev/cmd_vm.go +++ /dev/null @@ -1,509 +0,0 @@ -package dev - -import ( - "context" - "errors" - "os" - "time" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/devops" - "github.com/host-uk/core/pkg/i18n" -) - -// addVMCommands adds the dev environment VM commands to the dev parent command. -// These are added as direct subcommands: core dev install, core dev boot, etc. -func addVMCommands(parent *cli.Command) { - addVMInstallCommand(parent) - addVMBootCommand(parent) - addVMStopCommand(parent) - addVMStatusCommand(parent) - addVMShellCommand(parent) - addVMServeCommand(parent) - addVMTestCommand(parent) - addVMClaudeCommand(parent) - addVMUpdateCommand(parent) -} - -// addVMInstallCommand adds the 'dev install' command. 
-func addVMInstallCommand(parent *cli.Command) { - installCmd := &cli.Command{ - Use: "install", - Short: i18n.T("cmd.dev.vm.install.short"), - Long: i18n.T("cmd.dev.vm.install.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runVMInstall() - }, - } - - parent.AddCommand(installCmd) -} - -func runVMInstall() error { - d, err := devops.New() - if err != nil { - return err - } - - if d.IsInstalled() { - cli.Text(successStyle.Render(i18n.T("cmd.dev.vm.already_installed"))) - cli.Blank() - cli.Text(i18n.T("cmd.dev.vm.check_updates", map[string]interface{}{"Command": dimStyle.Render("core dev update")})) - return nil - } - - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("image")), devops.ImageName()) - cli.Blank() - cli.Text(i18n.T("cmd.dev.vm.downloading")) - cli.Blank() - - ctx := context.Background() - start := time.Now() - var lastProgress int64 - - err = d.Install(ctx, func(downloaded, total int64) { - if total > 0 { - pct := int(float64(downloaded) / float64(total) * 100) - if pct != int(float64(lastProgress)/float64(total)*100) { - cli.Print("\r%s %d%%", dimStyle.Render(i18n.T("cmd.dev.vm.progress_label")), pct) - lastProgress = downloaded - } - } - }) - - cli.Blank() // Clear progress line - - if err != nil { - return cli.Wrap(err, "install failed") - } - - elapsed := time.Since(start).Round(time.Second) - cli.Blank() - cli.Text(i18n.T("cmd.dev.vm.installed_in", map[string]interface{}{"Duration": elapsed})) - cli.Blank() - cli.Text(i18n.T("cmd.dev.vm.start_with", map[string]interface{}{"Command": dimStyle.Render("core dev boot")})) - - return nil -} - -// VM boot command flags -var ( - vmBootMemory int - vmBootCPUs int - vmBootFresh bool -) - -// addVMBootCommand adds the 'devops boot' command. 
-func addVMBootCommand(parent *cli.Command) { - bootCmd := &cli.Command{ - Use: "boot", - Short: i18n.T("cmd.dev.vm.boot.short"), - Long: i18n.T("cmd.dev.vm.boot.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runVMBoot(vmBootMemory, vmBootCPUs, vmBootFresh) - }, - } - - bootCmd.Flags().IntVar(&vmBootMemory, "memory", 0, i18n.T("cmd.dev.vm.boot.flag.memory")) - bootCmd.Flags().IntVar(&vmBootCPUs, "cpus", 0, i18n.T("cmd.dev.vm.boot.flag.cpus")) - bootCmd.Flags().BoolVar(&vmBootFresh, "fresh", false, i18n.T("cmd.dev.vm.boot.flag.fresh")) - - parent.AddCommand(bootCmd) -} - -func runVMBoot(memory, cpus int, fresh bool) error { - d, err := devops.New() - if err != nil { - return err - } - - if !d.IsInstalled() { - return errors.New(i18n.T("cmd.dev.vm.not_installed")) - } - - opts := devops.DefaultBootOptions() - if memory > 0 { - opts.Memory = memory - } - if cpus > 0 { - opts.CPUs = cpus - } - opts.Fresh = fresh - - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.dev.vm.config_label")), i18n.T("cmd.dev.vm.config_value", map[string]interface{}{"Memory": opts.Memory, "CPUs": opts.CPUs})) - cli.Blank() - cli.Text(i18n.T("cmd.dev.vm.booting")) - - ctx := context.Background() - if err := d.Boot(ctx, opts); err != nil { - return err - } - - cli.Blank() - cli.Text(successStyle.Render(i18n.T("cmd.dev.vm.running"))) - cli.Blank() - cli.Text(i18n.T("cmd.dev.vm.connect_with", map[string]interface{}{"Command": dimStyle.Render("core dev shell")})) - cli.Print("%s %s\n", i18n.T("cmd.dev.vm.ssh_port"), dimStyle.Render("2222")) - - return nil -} - -// addVMStopCommand adds the 'devops stop' command. 
-func addVMStopCommand(parent *cli.Command) { - stopCmd := &cli.Command{ - Use: "stop", - Short: i18n.T("cmd.dev.vm.stop.short"), - Long: i18n.T("cmd.dev.vm.stop.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runVMStop() - }, - } - - parent.AddCommand(stopCmd) -} - -func runVMStop() error { - d, err := devops.New() - if err != nil { - return err - } - - ctx := context.Background() - running, err := d.IsRunning(ctx) - if err != nil { - return err - } - - if !running { - cli.Text(dimStyle.Render(i18n.T("cmd.dev.vm.not_running"))) - return nil - } - - cli.Text(i18n.T("cmd.dev.vm.stopping")) - - if err := d.Stop(ctx); err != nil { - return err - } - - cli.Text(successStyle.Render(i18n.T("common.status.stopped"))) - return nil -} - -// addVMStatusCommand adds the 'devops status' command. -func addVMStatusCommand(parent *cli.Command) { - statusCmd := &cli.Command{ - Use: "vm-status", - Short: i18n.T("cmd.dev.vm.status.short"), - Long: i18n.T("cmd.dev.vm.status.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runVMStatus() - }, - } - - parent.AddCommand(statusCmd) -} - -func runVMStatus() error { - d, err := devops.New() - if err != nil { - return err - } - - ctx := context.Background() - status, err := d.Status(ctx) - if err != nil { - return err - } - - cli.Text(headerStyle.Render(i18n.T("cmd.dev.vm.status_title"))) - cli.Blank() - - // Installation status - if status.Installed { - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.dev.vm.installed_label")), successStyle.Render(i18n.T("cmd.dev.vm.installed_yes"))) - if status.ImageVersion != "" { - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("version")), status.ImageVersion) - } - } else { - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.dev.vm.installed_label")), errorStyle.Render(i18n.T("cmd.dev.vm.installed_no"))) - cli.Blank() - cli.Text(i18n.T("cmd.dev.vm.install_with", map[string]interface{}{"Command": dimStyle.Render("core dev install")})) - return nil - } - - 
cli.Blank() - - // Running status - if status.Running { - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("status")), successStyle.Render(i18n.T("common.status.running"))) - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.dev.vm.container_label")), status.ContainerID[:8]) - cli.Print("%s %dMB\n", dimStyle.Render(i18n.T("cmd.dev.vm.memory_label")), status.Memory) - cli.Print("%s %d\n", dimStyle.Render(i18n.T("cmd.dev.vm.cpus_label")), status.CPUs) - cli.Print("%s %d\n", dimStyle.Render(i18n.T("cmd.dev.vm.ssh_port")), status.SSHPort) - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.dev.vm.uptime_label")), formatVMUptime(status.Uptime)) - } else { - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("status")), dimStyle.Render(i18n.T("common.status.stopped"))) - cli.Blank() - cli.Text(i18n.T("cmd.dev.vm.start_with", map[string]interface{}{"Command": dimStyle.Render("core dev boot")})) - } - - return nil -} - -func formatVMUptime(d time.Duration) string { - if d < time.Minute { - return cli.Sprintf("%ds", int(d.Seconds())) - } - if d < time.Hour { - return cli.Sprintf("%dm", int(d.Minutes())) - } - if d < 24*time.Hour { - return cli.Sprintf("%dh %dm", int(d.Hours()), int(d.Minutes())%60) - } - return cli.Sprintf("%dd %dh", int(d.Hours()/24), int(d.Hours())%24) -} - -// VM shell command flags -var vmShellConsole bool - -// addVMShellCommand adds the 'devops shell' command. 
-func addVMShellCommand(parent *cli.Command) { - shellCmd := &cli.Command{ - Use: "shell [-- command...]", - Short: i18n.T("cmd.dev.vm.shell.short"), - Long: i18n.T("cmd.dev.vm.shell.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runVMShell(vmShellConsole, args) - }, - } - - shellCmd.Flags().BoolVar(&vmShellConsole, "console", false, i18n.T("cmd.dev.vm.shell.flag.console")) - - parent.AddCommand(shellCmd) -} - -func runVMShell(console bool, command []string) error { - d, err := devops.New() - if err != nil { - return err - } - - opts := devops.ShellOptions{ - Console: console, - Command: command, - } - - ctx := context.Background() - return d.Shell(ctx, opts) -} - -// VM serve command flags -var ( - vmServePort int - vmServePath string -) - -// addVMServeCommand adds the 'devops serve' command. -func addVMServeCommand(parent *cli.Command) { - serveCmd := &cli.Command{ - Use: "serve", - Short: i18n.T("cmd.dev.vm.serve.short"), - Long: i18n.T("cmd.dev.vm.serve.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runVMServe(vmServePort, vmServePath) - }, - } - - serveCmd.Flags().IntVarP(&vmServePort, "port", "p", 0, i18n.T("cmd.dev.vm.serve.flag.port")) - serveCmd.Flags().StringVar(&vmServePath, "path", "", i18n.T("cmd.dev.vm.serve.flag.path")) - - parent.AddCommand(serveCmd) -} - -func runVMServe(port int, path string) error { - d, err := devops.New() - if err != nil { - return err - } - - projectDir, err := os.Getwd() - if err != nil { - return err - } - - opts := devops.ServeOptions{ - Port: port, - Path: path, - } - - ctx := context.Background() - return d.Serve(ctx, projectDir, opts) -} - -// VM test command flags -var vmTestName string - -// addVMTestCommand adds the 'devops test' command. 
-func addVMTestCommand(parent *cli.Command) { - testCmd := &cli.Command{ - Use: "test [-- command...]", - Short: i18n.T("cmd.dev.vm.test.short"), - Long: i18n.T("cmd.dev.vm.test.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runVMTest(vmTestName, args) - }, - } - - testCmd.Flags().StringVarP(&vmTestName, "name", "n", "", i18n.T("cmd.dev.vm.test.flag.name")) - - parent.AddCommand(testCmd) -} - -func runVMTest(name string, command []string) error { - d, err := devops.New() - if err != nil { - return err - } - - projectDir, err := os.Getwd() - if err != nil { - return err - } - - opts := devops.TestOptions{ - Name: name, - Command: command, - } - - ctx := context.Background() - return d.Test(ctx, projectDir, opts) -} - -// VM claude command flags -var ( - vmClaudeNoAuth bool - vmClaudeModel string - vmClaudeAuthFlags []string -) - -// addVMClaudeCommand adds the 'devops claude' command. -func addVMClaudeCommand(parent *cli.Command) { - claudeCmd := &cli.Command{ - Use: "claude", - Short: i18n.T("cmd.dev.vm.claude.short"), - Long: i18n.T("cmd.dev.vm.claude.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runVMClaude(vmClaudeNoAuth, vmClaudeModel, vmClaudeAuthFlags) - }, - } - - claudeCmd.Flags().BoolVar(&vmClaudeNoAuth, "no-auth", false, i18n.T("cmd.dev.vm.claude.flag.no_auth")) - claudeCmd.Flags().StringVarP(&vmClaudeModel, "model", "m", "", i18n.T("cmd.dev.vm.claude.flag.model")) - claudeCmd.Flags().StringSliceVar(&vmClaudeAuthFlags, "auth", nil, i18n.T("cmd.dev.vm.claude.flag.auth")) - - parent.AddCommand(claudeCmd) -} - -func runVMClaude(noAuth bool, model string, authFlags []string) error { - d, err := devops.New() - if err != nil { - return err - } - - projectDir, err := os.Getwd() - if err != nil { - return err - } - - opts := devops.ClaudeOptions{ - NoAuth: noAuth, - Model: model, - Auth: authFlags, - } - - ctx := context.Background() - return d.Claude(ctx, projectDir, opts) -} - -// VM update command flags -var 
vmUpdateApply bool - -// addVMUpdateCommand adds the 'devops update' command. -func addVMUpdateCommand(parent *cli.Command) { - updateCmd := &cli.Command{ - Use: "update", - Short: i18n.T("cmd.dev.vm.update.short"), - Long: i18n.T("cmd.dev.vm.update.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runVMUpdate(vmUpdateApply) - }, - } - - updateCmd.Flags().BoolVar(&vmUpdateApply, "apply", false, i18n.T("cmd.dev.vm.update.flag.apply")) - - parent.AddCommand(updateCmd) -} - -func runVMUpdate(apply bool) error { - d, err := devops.New() - if err != nil { - return err - } - - ctx := context.Background() - - cli.Text(i18n.T("common.progress.checking_updates")) - cli.Blank() - - current, latest, hasUpdate, err := d.CheckUpdate(ctx) - if err != nil { - return cli.Wrap(err, "failed to check for updates") - } - - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("current")), valueStyle.Render(current)) - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.dev.vm.latest_label")), valueStyle.Render(latest)) - cli.Blank() - - if !hasUpdate { - cli.Text(successStyle.Render(i18n.T("cmd.dev.vm.up_to_date"))) - return nil - } - - cli.Text(warningStyle.Render(i18n.T("cmd.dev.vm.update_available"))) - cli.Blank() - - if !apply { - cli.Text(i18n.T("cmd.dev.vm.run_to_update", map[string]interface{}{"Command": dimStyle.Render("core dev update --apply")})) - return nil - } - - // Stop if running - running, _ := d.IsRunning(ctx) - if running { - cli.Text(i18n.T("cmd.dev.vm.stopping_current")) - _ = d.Stop(ctx) - } - - cli.Text(i18n.T("cmd.dev.vm.downloading_update")) - cli.Blank() - - start := time.Now() - err = d.Install(ctx, func(downloaded, total int64) { - if total > 0 { - pct := int(float64(downloaded) / float64(total) * 100) - cli.Print("\r%s %d%%", dimStyle.Render(i18n.T("cmd.dev.vm.progress_label")), pct) - } - }) - - cli.Blank() - - if err != nil { - return cli.Wrap(err, "update failed") - } - - elapsed := time.Since(start).Round(time.Second) - cli.Blank() - 
cli.Text(i18n.T("cmd.dev.vm.updated_in", map[string]interface{}{"Duration": elapsed})) - - return nil -} diff --git a/pkg/dev/cmd_work.go b/pkg/dev/cmd_work.go deleted file mode 100644 index 07d98d5..0000000 --- a/pkg/dev/cmd_work.go +++ /dev/null @@ -1,346 +0,0 @@ -package dev - -import ( - "context" - "os" - "os/exec" - "sort" - "strings" - - "github.com/host-uk/core/pkg/agentic" - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/git" - "github.com/host-uk/core/pkg/i18n" -) - -// Work command flags -var ( - workStatusOnly bool - workAutoCommit bool - workRegistryPath string -) - -// addWorkCommand adds the 'work' command to the given parent command. -func addWorkCommand(parent *cli.Command) { - workCmd := &cli.Command{ - Use: "work", - Short: i18n.T("cmd.dev.work.short"), - Long: i18n.T("cmd.dev.work.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runWork(workRegistryPath, workStatusOnly, workAutoCommit) - }, - } - - workCmd.Flags().BoolVar(&workStatusOnly, "status", false, i18n.T("cmd.dev.work.flag.status")) - workCmd.Flags().BoolVar(&workAutoCommit, "commit", false, i18n.T("cmd.dev.work.flag.commit")) - workCmd.Flags().StringVar(&workRegistryPath, "registry", "", i18n.T("common.flag.registry")) - - parent.AddCommand(workCmd) -} - -func runWork(registryPath string, statusOnly, autoCommit bool) error { - ctx := context.Background() - - // Build worker bundle with required services - bundle, err := NewWorkBundle(WorkBundleOptions{ - RegistryPath: registryPath, - }) - if err != nil { - return err - } - - // Start services (registers handlers) - if err := bundle.Start(ctx); err != nil { - return err - } - defer bundle.Stop(ctx) - - // Load registry and get paths - paths, names, err := func() ([]string, map[string]string, error) { - reg, _, err := loadRegistryWithConfig(registryPath) - if err != nil { - return nil, nil, err - } - var paths []string - names := make(map[string]string) - for _, repo := range reg.List() { - if 
repo.IsGitRepo() { - paths = append(paths, repo.Path) - names[repo.Path] = repo.Name - } - } - return paths, names, nil - }() - if err != nil { - return err - } - - if len(paths) == 0 { - cli.Text(i18n.T("cmd.dev.no_git_repos")) - return nil - } - - // QUERY git status - result, handled, err := bundle.Core.QUERY(git.QueryStatus{ - Paths: paths, - Names: names, - }) - if !handled { - return cli.Err("git service not available") - } - if err != nil { - return err - } - statuses := result.([]git.RepoStatus) - - // Sort by repo name for consistent output - sort.Slice(statuses, func(i, j int) bool { - return statuses[i].Name < statuses[j].Name - }) - - // Display status table - printStatusTable(statuses) - - // Collect dirty and ahead repos - var dirtyRepos []git.RepoStatus - var aheadRepos []git.RepoStatus - - for _, s := range statuses { - if s.Error != nil { - continue - } - if s.IsDirty() { - dirtyRepos = append(dirtyRepos, s) - } - if s.HasUnpushed() { - aheadRepos = append(aheadRepos, s) - } - } - - // Auto-commit dirty repos if requested - if autoCommit && len(dirtyRepos) > 0 { - cli.Blank() - cli.Print("%s\n", cli.TitleStyle.Render(i18n.T("cmd.dev.commit.committing"))) - cli.Blank() - - for _, s := range dirtyRepos { - // PERFORM commit via agentic service - _, handled, err := bundle.Core.PERFORM(agentic.TaskCommit{ - Path: s.Path, - Name: s.Name, - }) - if !handled { - cli.Print(" %s %s: %s\n", warningStyle.Render("!"), s.Name, "agentic service not available") - continue - } - if err != nil { - cli.Print(" %s %s: %s\n", errorStyle.Render("x"), s.Name, err) - } else { - cli.Print(" %s %s\n", successStyle.Render("v"), s.Name) - } - } - - // Re-QUERY status after commits - result, _, _ = bundle.Core.QUERY(git.QueryStatus{ - Paths: paths, - Names: names, - }) - statuses = result.([]git.RepoStatus) - - // Rebuild ahead repos list - aheadRepos = nil - for _, s := range statuses { - if s.Error == nil && s.HasUnpushed() { - aheadRepos = append(aheadRepos, s) - } - } - } 
- - // If status only, we're done - if statusOnly { - if len(dirtyRepos) > 0 && !autoCommit { - cli.Blank() - cli.Print("%s\n", dimStyle.Render(i18n.T("cmd.dev.work.use_commit_flag"))) - } - return nil - } - - // Push repos with unpushed commits - if len(aheadRepos) == 0 { - cli.Blank() - cli.Text(i18n.T("cmd.dev.work.all_up_to_date")) - return nil - } - - cli.Blank() - cli.Print("%s\n", i18n.T("common.count.repos_unpushed", map[string]interface{}{"Count": len(aheadRepos)})) - for _, s := range aheadRepos { - cli.Print(" %s: %s\n", s.Name, i18n.T("common.count.commits", map[string]interface{}{"Count": s.Ahead})) - } - - cli.Blank() - if !cli.Confirm(i18n.T("cmd.dev.push.confirm")) { - cli.Text(i18n.T("cli.aborted")) - return nil - } - - cli.Blank() - - // PERFORM push for each repo - var divergedRepos []git.RepoStatus - - for _, s := range aheadRepos { - _, handled, err := bundle.Core.PERFORM(git.TaskPush{ - Path: s.Path, - Name: s.Name, - }) - if !handled { - cli.Print(" %s %s: %s\n", errorStyle.Render("x"), s.Name, "git service not available") - continue - } - if err != nil { - if git.IsNonFastForward(err) { - cli.Print(" %s %s: %s\n", warningStyle.Render("!"), s.Name, i18n.T("cmd.dev.push.diverged")) - divergedRepos = append(divergedRepos, s) - } else { - cli.Print(" %s %s: %s\n", errorStyle.Render("x"), s.Name, err) - } - } else { - cli.Print(" %s %s\n", successStyle.Render("v"), s.Name) - } - } - - // Handle diverged repos - offer to pull and retry - if len(divergedRepos) > 0 { - cli.Blank() - cli.Print("%s\n", i18n.T("cmd.dev.push.diverged_help")) - if cli.Confirm(i18n.T("cmd.dev.push.pull_and_retry")) { - cli.Blank() - for _, s := range divergedRepos { - cli.Print(" %s %s...\n", dimStyle.Render("↓"), s.Name) - - // PERFORM pull - _, _, err := bundle.Core.PERFORM(git.TaskPull{Path: s.Path, Name: s.Name}) - if err != nil { - cli.Print(" %s %s: %s\n", errorStyle.Render("x"), s.Name, err) - continue - } - - cli.Print(" %s %s...\n", dimStyle.Render("↑"), s.Name) 
- - // PERFORM push - _, _, err = bundle.Core.PERFORM(git.TaskPush{Path: s.Path, Name: s.Name}) - if err != nil { - cli.Print(" %s %s: %s\n", errorStyle.Render("x"), s.Name, err) - continue - } - - cli.Print(" %s %s\n", successStyle.Render("v"), s.Name) - } - } - } - - return nil -} - -func printStatusTable(statuses []git.RepoStatus) { - // Calculate column widths - nameWidth := 4 // "Repo" - for _, s := range statuses { - if len(s.Name) > nameWidth { - nameWidth = len(s.Name) - } - } - - // Print header with fixed-width formatting - cli.Print("%-*s %8s %9s %6s %5s\n", - nameWidth, - cli.TitleStyle.Render(i18n.Label("repo")), - cli.TitleStyle.Render(i18n.T("cmd.dev.work.table_modified")), - cli.TitleStyle.Render(i18n.T("cmd.dev.work.table_untracked")), - cli.TitleStyle.Render(i18n.T("cmd.dev.work.table_staged")), - cli.TitleStyle.Render(i18n.T("cmd.dev.work.table_ahead")), - ) - - // Print separator - cli.Text(strings.Repeat("-", nameWidth+2+10+11+8+7)) - - // Print rows - for _, s := range statuses { - if s.Error != nil { - paddedName := cli.Sprintf("%-*s", nameWidth, s.Name) - cli.Print("%s %s\n", - repoNameStyle.Render(paddedName), - errorStyle.Render(i18n.T("cmd.dev.work.error_prefix")+" "+s.Error.Error()), - ) - continue - } - - // Style numbers based on values - modStr := cli.Sprintf("%d", s.Modified) - if s.Modified > 0 { - modStr = dirtyStyle.Render(modStr) - } else { - modStr = cleanStyle.Render(modStr) - } - - untrackedStr := cli.Sprintf("%d", s.Untracked) - if s.Untracked > 0 { - untrackedStr = dirtyStyle.Render(untrackedStr) - } else { - untrackedStr = cleanStyle.Render(untrackedStr) - } - - stagedStr := cli.Sprintf("%d", s.Staged) - if s.Staged > 0 { - stagedStr = aheadStyle.Render(stagedStr) - } else { - stagedStr = cleanStyle.Render(stagedStr) - } - - aheadStr := cli.Sprintf("%d", s.Ahead) - if s.Ahead > 0 { - aheadStr = aheadStyle.Render(aheadStr) - } else { - aheadStr = cleanStyle.Render(aheadStr) - } - - // Pad name before styling to avoid ANSI 
code length issues - paddedName := cli.Sprintf("%-*s", nameWidth, s.Name) - cli.Print("%s %8s %9s %6s %5s\n", - repoNameStyle.Render(paddedName), - modStr, - untrackedStr, - stagedStr, - aheadStr, - ) - } -} - -// claudeCommit shells out to claude for committing (legacy helper for other commands) -func claudeCommit(ctx context.Context, repoPath, repoName, registryPath string) error { - prompt := agentic.Prompt("commit") - - cmd := exec.CommandContext(ctx, "claude", "-p", prompt, "--allowedTools", "Bash,Read,Glob,Grep") - cmd.Dir = repoPath - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - cmd.Stdin = os.Stdin - - return cmd.Run() -} - -// claudeEditCommit shells out to claude with edit permissions (legacy helper) -func claudeEditCommit(ctx context.Context, repoPath, repoName, registryPath string) error { - prompt := agentic.Prompt("commit") - - cmd := exec.CommandContext(ctx, "claude", "-p", prompt, "--allowedTools", "Bash,Read,Write,Edit,Glob,Grep") - cmd.Dir = repoPath - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - cmd.Stdin = os.Stdin - - return cmd.Run() -} - - diff --git a/pkg/dev/cmd_workflow.go b/pkg/dev/cmd_workflow.go deleted file mode 100644 index 354f938..0000000 --- a/pkg/dev/cmd_workflow.go +++ /dev/null @@ -1,307 +0,0 @@ -package dev - -import ( - "os" - "path/filepath" - "sort" - "strings" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -// Workflow command flags -var ( - workflowRegistryPath string - workflowDryRun bool -) - -// addWorkflowCommands adds the 'workflow' subcommand and its subcommands. 
-func addWorkflowCommands(parent *cli.Command) { - workflowCmd := &cli.Command{ - Use: "workflow", - Short: i18n.T("cmd.dev.workflow.short"), - Long: i18n.T("cmd.dev.workflow.long"), - } - - // Shared flags - workflowCmd.PersistentFlags().StringVar(&workflowRegistryPath, "registry", "", i18n.T("common.flag.registry")) - - // Subcommands - addWorkflowListCommand(workflowCmd) - addWorkflowSyncCommand(workflowCmd) - - parent.AddCommand(workflowCmd) -} - -// addWorkflowListCommand adds the 'workflow list' subcommand. -func addWorkflowListCommand(parent *cli.Command) { - listCmd := &cli.Command{ - Use: "list", - Short: i18n.T("cmd.dev.workflow.list.short"), - Long: i18n.T("cmd.dev.workflow.list.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runWorkflowList(workflowRegistryPath) - }, - } - - parent.AddCommand(listCmd) -} - -// addWorkflowSyncCommand adds the 'workflow sync' subcommand. -func addWorkflowSyncCommand(parent *cli.Command) { - syncCmd := &cli.Command{ - Use: "sync ", - Short: i18n.T("cmd.dev.workflow.sync.short"), - Long: i18n.T("cmd.dev.workflow.sync.long"), - Args: cli.ExactArgs(1), - RunE: func(cmd *cli.Command, args []string) error { - return runWorkflowSync(workflowRegistryPath, args[0], workflowDryRun) - }, - } - - syncCmd.Flags().BoolVar(&workflowDryRun, "dry-run", false, i18n.T("cmd.dev.workflow.sync.flag.dry_run")) - - parent.AddCommand(syncCmd) -} - -// runWorkflowList shows a table of repos vs workflows. 
-func runWorkflowList(registryPath string) error { - reg, registryDir, err := loadRegistryWithConfig(registryPath) - if err != nil { - return err - } - - repoList := reg.List() - if len(repoList) == 0 { - cli.Text(i18n.T("cmd.dev.no_git_repos")) - return nil - } - - // Sort repos by name for consistent output - sort.Slice(repoList, func(i, j int) bool { - return repoList[i].Name < repoList[j].Name - }) - - // Collect all unique workflow files across all repos - workflowSet := make(map[string]bool) - repoWorkflows := make(map[string]map[string]bool) - - for _, repo := range repoList { - workflows := findWorkflows(repo.Path) - repoWorkflows[repo.Name] = make(map[string]bool) - for _, wf := range workflows { - workflowSet[wf] = true - repoWorkflows[repo.Name][wf] = true - } - } - - // Sort workflow names - var workflowNames []string - for wf := range workflowSet { - workflowNames = append(workflowNames, wf) - } - sort.Strings(workflowNames) - - if len(workflowNames) == 0 { - cli.Text(i18n.T("cmd.dev.workflow.no_workflows")) - return nil - } - - // Check for template workflows in the registry directory - templateWorkflows := findWorkflows(filepath.Join(registryDir, ".github", "workflow-templates")) - if len(templateWorkflows) == 0 { - // Also check .github/workflows in the devops repo itself - templateWorkflows = findWorkflows(filepath.Join(registryDir, ".github", "workflows")) - } - templateSet := make(map[string]bool) - for _, wf := range templateWorkflows { - templateSet[wf] = true - } - - // Build table - headers := []string{i18n.T("cmd.dev.workflow.header.repo")} - headers = append(headers, workflowNames...) - table := cli.NewTable(headers...) - - for _, repo := range repoList { - row := []string{repo.Name} - for _, wf := range workflowNames { - if repoWorkflows[repo.Name][wf] { - row = append(row, successStyle.Render(cli.Glyph(":check:"))) - } else { - row = append(row, errorStyle.Render(cli.Glyph(":cross:"))) - } - } - table.AddRow(row...) 
- } - - cli.Blank() - table.Render() - - return nil -} - -// runWorkflowSync copies a workflow template to all repos. -func runWorkflowSync(registryPath string, workflowFile string, dryRun bool) error { - reg, registryDir, err := loadRegistryWithConfig(registryPath) - if err != nil { - return err - } - - // Find the template workflow - templatePath := findTemplateWorkflow(registryDir, workflowFile) - if templatePath == "" { - return cli.Err("%s", i18n.T("cmd.dev.workflow.template_not_found", map[string]interface{}{"File": workflowFile})) - } - - // Read template content - templateContent, err := os.ReadFile(templatePath) - if err != nil { - return cli.Wrap(err, i18n.T("cmd.dev.workflow.read_template_error")) - } - - repoList := reg.List() - if len(repoList) == 0 { - cli.Text(i18n.T("cmd.dev.no_git_repos")) - return nil - } - - // Sort repos by name for consistent output - sort.Slice(repoList, func(i, j int) bool { - return repoList[i].Name < repoList[j].Name - }) - - if dryRun { - cli.Text(i18n.T("cmd.dev.workflow.dry_run_mode")) - cli.Blank() - } - - var synced, skipped, failed int - - for _, repo := range repoList { - if !repo.IsGitRepo() { - skipped++ - continue - } - - destDir := filepath.Join(repo.Path, ".github", "workflows") - destPath := filepath.Join(destDir, workflowFile) - - // Check if workflow already exists and is identical - if existingContent, err := os.ReadFile(destPath); err == nil { - if string(existingContent) == string(templateContent) { - cli.Print(" %s %s %s\n", - dimStyle.Render("-"), - repoNameStyle.Render(repo.Name), - dimStyle.Render(i18n.T("cmd.dev.workflow.up_to_date"))) - skipped++ - continue - } - } - - if dryRun { - cli.Print(" %s %s %s\n", - warningStyle.Render("*"), - repoNameStyle.Render(repo.Name), - i18n.T("cmd.dev.workflow.would_sync")) - synced++ - continue - } - - // Create .github/workflows directory if needed - if err := os.MkdirAll(destDir, 0755); err != nil { - cli.Print(" %s %s %s\n", - 
errorStyle.Render(cli.Glyph(":cross:")), - repoNameStyle.Render(repo.Name), - err.Error()) - failed++ - continue - } - - // Write workflow file - if err := os.WriteFile(destPath, templateContent, 0644); err != nil { - cli.Print(" %s %s %s\n", - errorStyle.Render(cli.Glyph(":cross:")), - repoNameStyle.Render(repo.Name), - err.Error()) - failed++ - continue - } - - cli.Print(" %s %s %s\n", - successStyle.Render(cli.Glyph(":check:")), - repoNameStyle.Render(repo.Name), - i18n.T("cmd.dev.workflow.synced")) - synced++ - } - - cli.Blank() - - // Summary - if dryRun { - cli.Print("%s %s\n", - i18n.T("cmd.dev.workflow.would_sync_count", map[string]interface{}{"Count": synced}), - dimStyle.Render(i18n.T("cmd.dev.workflow.skipped_count", map[string]interface{}{"Count": skipped}))) - cli.Text(i18n.T("cmd.dev.workflow.run_without_dry_run")) - } else { - cli.Print("%s %s\n", - successStyle.Render(i18n.T("cmd.dev.workflow.synced_count", map[string]interface{}{"Count": synced})), - dimStyle.Render(i18n.T("cmd.dev.workflow.skipped_count", map[string]interface{}{"Count": skipped}))) - if failed > 0 { - cli.Print("%s\n", errorStyle.Render(i18n.T("cmd.dev.workflow.failed_count", map[string]interface{}{"Count": failed}))) - } - } - - return nil -} - -// findWorkflows returns a list of workflow file names in a directory. 
-func findWorkflows(dir string) []string { - workflowsDir := filepath.Join(dir, ".github", "workflows") - // If dir already ends with workflows path, use it directly - if strings.HasSuffix(dir, "workflows") || strings.HasSuffix(dir, "workflow-templates") { - workflowsDir = dir - } - - entries, err := os.ReadDir(workflowsDir) - if err != nil { - return nil - } - - var workflows []string - for _, entry := range entries { - if entry.IsDir() { - continue - } - name := entry.Name() - if strings.HasSuffix(name, ".yml") || strings.HasSuffix(name, ".yaml") { - workflows = append(workflows, name) - } - } - - return workflows -} - -// findTemplateWorkflow finds a workflow template file in common locations. -func findTemplateWorkflow(registryDir, workflowFile string) string { - // Ensure .yml extension - if !strings.HasSuffix(workflowFile, ".yml") && !strings.HasSuffix(workflowFile, ".yaml") { - workflowFile = workflowFile + ".yml" - } - - // Check common template locations - candidates := []string{ - filepath.Join(registryDir, ".github", "workflow-templates", workflowFile), - filepath.Join(registryDir, ".github", "workflows", workflowFile), - filepath.Join(registryDir, "workflow-templates", workflowFile), - } - - for _, candidate := range candidates { - if _, err := os.Stat(candidate); err == nil { - return candidate - } - } - - return "" -} diff --git a/pkg/dev/cmd_workflow_test.go b/pkg/dev/cmd_workflow_test.go deleted file mode 100644 index 3f0cd82..0000000 --- a/pkg/dev/cmd_workflow_test.go +++ /dev/null @@ -1,107 +0,0 @@ -package dev - -import ( - "os" - "path/filepath" - "testing" -) - -func TestFindWorkflows_Good(t *testing.T) { - // Create a temp directory with workflow files - tmpDir := t.TempDir() - workflowsDir := filepath.Join(tmpDir, ".github", "workflows") - if err := os.MkdirAll(workflowsDir, 0755); err != nil { - t.Fatalf("Failed to create workflows dir: %v", err) - } - - // Create some workflow files - for _, name := range []string{"qa.yml", "tests.yml", 
"codeql.yaml"} { - if err := os.WriteFile(filepath.Join(workflowsDir, name), []byte("name: Test"), 0644); err != nil { - t.Fatalf("Failed to create workflow file: %v", err) - } - } - - // Create a non-workflow file (should be ignored) - if err := os.WriteFile(filepath.Join(workflowsDir, "readme.md"), []byte("# Workflows"), 0644); err != nil { - t.Fatalf("Failed to create readme file: %v", err) - } - - workflows := findWorkflows(tmpDir) - - if len(workflows) != 3 { - t.Errorf("Expected 3 workflows, got %d", len(workflows)) - } - - // Check that all expected workflows are found - found := make(map[string]bool) - for _, wf := range workflows { - found[wf] = true - } - - for _, expected := range []string{"qa.yml", "tests.yml", "codeql.yaml"} { - if !found[expected] { - t.Errorf("Expected to find workflow %s", expected) - } - } -} - -func TestFindWorkflows_NoWorkflowsDir(t *testing.T) { - tmpDir := t.TempDir() - workflows := findWorkflows(tmpDir) - - if len(workflows) != 0 { - t.Errorf("Expected 0 workflows for non-existent dir, got %d", len(workflows)) - } -} - -func TestFindTemplateWorkflow_Good(t *testing.T) { - tmpDir := t.TempDir() - templatesDir := filepath.Join(tmpDir, ".github", "workflow-templates") - if err := os.MkdirAll(templatesDir, 0755); err != nil { - t.Fatalf("Failed to create templates dir: %v", err) - } - - templateContent := "name: QA\non: [push]" - if err := os.WriteFile(filepath.Join(templatesDir, "qa.yml"), []byte(templateContent), 0644); err != nil { - t.Fatalf("Failed to create template file: %v", err) - } - - // Test finding with .yml extension - result := findTemplateWorkflow(tmpDir, "qa.yml") - if result == "" { - t.Error("Expected to find qa.yml template") - } - - // Test finding without extension (should auto-add .yml) - result = findTemplateWorkflow(tmpDir, "qa") - if result == "" { - t.Error("Expected to find qa template without extension") - } -} - -func TestFindTemplateWorkflow_FallbackToWorkflows(t *testing.T) { - tmpDir := t.TempDir() 
- workflowsDir := filepath.Join(tmpDir, ".github", "workflows") - if err := os.MkdirAll(workflowsDir, 0755); err != nil { - t.Fatalf("Failed to create workflows dir: %v", err) - } - - templateContent := "name: Tests\non: [push]" - if err := os.WriteFile(filepath.Join(workflowsDir, "tests.yml"), []byte(templateContent), 0644); err != nil { - t.Fatalf("Failed to create workflow file: %v", err) - } - - result := findTemplateWorkflow(tmpDir, "tests.yml") - if result == "" { - t.Error("Expected to find tests.yml in workflows dir") - } -} - -func TestFindTemplateWorkflow_NotFound(t *testing.T) { - tmpDir := t.TempDir() - - result := findTemplateWorkflow(tmpDir, "nonexistent.yml") - if result != "" { - t.Errorf("Expected empty string for non-existent template, got %s", result) - } -} diff --git a/pkg/dev/registry.go b/pkg/dev/registry.go deleted file mode 100644 index 8d4b9b8..0000000 --- a/pkg/dev/registry.go +++ /dev/null @@ -1,68 +0,0 @@ -package dev - -import ( - "os" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/repos" - "github.com/host-uk/core/pkg/workspace" -) - -// loadRegistryWithConfig loads the registry and applies workspace configuration. 
-func loadRegistryWithConfig(registryPath string) (*repos.Registry, string, error) { - var reg *repos.Registry - var err error - var registryDir string - - if registryPath != "" { - reg, err = repos.LoadRegistry(registryPath) - if err != nil { - return nil, "", cli.Wrap(err, "failed to load registry") - } - cli.Print("%s %s\n\n", dimStyle.Render(i18n.Label("registry")), registryPath) - registryDir = filepath.Dir(registryPath) - } else { - registryPath, err = repos.FindRegistry() - if err == nil { - reg, err = repos.LoadRegistry(registryPath) - if err != nil { - return nil, "", cli.Wrap(err, "failed to load registry") - } - cli.Print("%s %s\n\n", dimStyle.Render(i18n.Label("registry")), registryPath) - registryDir = filepath.Dir(registryPath) - } else { - // Fallback: scan current directory - cwd, _ := os.Getwd() - reg, err = repos.ScanDirectory(cwd) - if err != nil { - return nil, "", cli.Wrap(err, "failed to scan directory") - } - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.dev.scanning_label")), cwd) - registryDir = cwd - } - } - // Load workspace config to respect packages_dir (only if config exists) - if wsConfig, err := workspace.LoadConfig(registryDir); err == nil && wsConfig != nil { - if wsConfig.PackagesDir != "" { - pkgDir := wsConfig.PackagesDir - // Expand ~ - if strings.HasPrefix(pkgDir, "~/") { - home, _ := os.UserHomeDir() - pkgDir = filepath.Join(home, pkgDir[2:]) - } - if !filepath.IsAbs(pkgDir) { - pkgDir = filepath.Join(registryDir, pkgDir) - } - - // Update repo paths - for _, repo := range reg.Repos { - repo.Path = filepath.Join(pkgDir, repo.Name) - } - } - } - - return reg, registryDir, nil -} diff --git a/pkg/dev/service.go b/pkg/dev/service.go deleted file mode 100644 index a145cd9..0000000 --- a/pkg/dev/service.go +++ /dev/null @@ -1,288 +0,0 @@ -package dev - -import ( - "context" - "sort" - "strings" - - "github.com/host-uk/core/pkg/agentic" - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/framework" - 
"github.com/host-uk/core/pkg/git" -) - -// Tasks for dev service - -// TaskWork runs the full dev workflow: status, commit, push. -type TaskWork struct { - RegistryPath string - StatusOnly bool - AutoCommit bool -} - -// TaskStatus displays git status for all repos. -type TaskStatus struct { - RegistryPath string -} - -// ServiceOptions for configuring the dev service. -type ServiceOptions struct { - RegistryPath string -} - -// Service provides dev workflow orchestration as a Core service. -type Service struct { - *framework.ServiceRuntime[ServiceOptions] -} - -// NewService creates a dev service factory. -func NewService(opts ServiceOptions) func(*framework.Core) (any, error) { - return func(c *framework.Core) (any, error) { - return &Service{ - ServiceRuntime: framework.NewServiceRuntime(c, opts), - }, nil - } -} - -// OnStartup registers task handlers. -func (s *Service) OnStartup(ctx context.Context) error { - s.Core().RegisterTask(s.handleTask) - return nil -} - -func (s *Service) handleTask(c *framework.Core, t framework.Task) (any, bool, error) { - switch m := t.(type) { - case TaskWork: - err := s.runWork(m) - return nil, true, err - - case TaskStatus: - err := s.runStatus(m) - return nil, true, err - } - return nil, false, nil -} - -func (s *Service) runWork(task TaskWork) error { - // Load registry - paths, names, err := s.loadRegistry(task.RegistryPath) - if err != nil { - return err - } - - if len(paths) == 0 { - cli.Println("No git repositories found") - return nil - } - - // QUERY git status - result, handled, err := s.Core().QUERY(git.QueryStatus{ - Paths: paths, - Names: names, - }) - if !handled { - return cli.Err("git service not available") - } - if err != nil { - return err - } - statuses := result.([]git.RepoStatus) - - // Sort by name - sort.Slice(statuses, func(i, j int) bool { - return statuses[i].Name < statuses[j].Name - }) - - // Display status table - s.printStatusTable(statuses) - - // Collect dirty and ahead repos - var dirtyRepos 
[]git.RepoStatus - var aheadRepos []git.RepoStatus - - for _, st := range statuses { - if st.Error != nil { - continue - } - if st.IsDirty() { - dirtyRepos = append(dirtyRepos, st) - } - if st.HasUnpushed() { - aheadRepos = append(aheadRepos, st) - } - } - - // Auto-commit dirty repos if requested - if task.AutoCommit && len(dirtyRepos) > 0 { - cli.Blank() - cli.Println("Committing changes...") - cli.Blank() - - for _, repo := range dirtyRepos { - _, handled, err := s.Core().PERFORM(agentic.TaskCommit{ - Path: repo.Path, - Name: repo.Name, - }) - if !handled { - // Agentic service not available - skip silently - cli.Print(" - %s: agentic service not available\n", repo.Name) - continue - } - if err != nil { - cli.Print(" x %s: %s\n", repo.Name, err) - } else { - cli.Print(" v %s\n", repo.Name) - } - } - - // Re-query status after commits - result, _, _ = s.Core().QUERY(git.QueryStatus{ - Paths: paths, - Names: names, - }) - statuses = result.([]git.RepoStatus) - - // Rebuild ahead repos list - aheadRepos = nil - for _, st := range statuses { - if st.Error == nil && st.HasUnpushed() { - aheadRepos = append(aheadRepos, st) - } - } - } - - // If status only, we're done - if task.StatusOnly { - if len(dirtyRepos) > 0 && !task.AutoCommit { - cli.Blank() - cli.Println("Use --commit flag to auto-commit dirty repos") - } - return nil - } - - // Push repos with unpushed commits - if len(aheadRepos) == 0 { - cli.Blank() - cli.Println("All repositories are up to date") - return nil - } - - cli.Blank() - cli.Print("%d repos with unpushed commits:\n", len(aheadRepos)) - for _, st := range aheadRepos { - cli.Print(" %s: %d commits\n", st.Name, st.Ahead) - } - - cli.Blank() - cli.Print("Push all? 
[y/N] ") - var answer string - cli.Scanln(&answer) - if strings.ToLower(answer) != "y" { - cli.Println("Aborted") - return nil - } - - cli.Blank() - - // Push each repo - for _, st := range aheadRepos { - _, handled, err := s.Core().PERFORM(git.TaskPush{ - Path: st.Path, - Name: st.Name, - }) - if !handled { - cli.Print(" x %s: git service not available\n", st.Name) - continue - } - if err != nil { - if git.IsNonFastForward(err) { - cli.Print(" ! %s: branch has diverged\n", st.Name) - } else { - cli.Print(" x %s: %s\n", st.Name, err) - } - } else { - cli.Print(" v %s\n", st.Name) - } - } - - return nil -} - -func (s *Service) runStatus(task TaskStatus) error { - paths, names, err := s.loadRegistry(task.RegistryPath) - if err != nil { - return err - } - - if len(paths) == 0 { - cli.Println("No git repositories found") - return nil - } - - result, handled, err := s.Core().QUERY(git.QueryStatus{ - Paths: paths, - Names: names, - }) - if !handled { - return cli.Err("git service not available") - } - if err != nil { - return err - } - - statuses := result.([]git.RepoStatus) - sort.Slice(statuses, func(i, j int) bool { - return statuses[i].Name < statuses[j].Name - }) - - s.printStatusTable(statuses) - return nil -} - -func (s *Service) loadRegistry(registryPath string) ([]string, map[string]string, error) { - reg, _, err := loadRegistryWithConfig(registryPath) - if err != nil { - return nil, nil, err - } - - var paths []string - names := make(map[string]string) - - for _, repo := range reg.List() { - if repo.IsGitRepo() { - paths = append(paths, repo.Path) - names[repo.Path] = repo.Name - } - } - - return paths, names, nil -} - -func (s *Service) printStatusTable(statuses []git.RepoStatus) { - // Calculate column widths - nameWidth := 4 // "Repo" - for _, st := range statuses { - if len(st.Name) > nameWidth { - nameWidth = len(st.Name) - } - } - - // Print header - cli.Print("%-*s %8s %9s %6s %5s\n", - nameWidth, "Repo", "Modified", "Untracked", "Staged", "Ahead") - - 
// Print separator - cli.Text(strings.Repeat("-", nameWidth+2+10+11+8+7)) - - // Print rows - for _, st := range statuses { - if st.Error != nil { - cli.Print("%-*s error: %s\n", nameWidth, st.Name, st.Error) - continue - } - - cli.Print("%-*s %8d %9d %6d %5d\n", - nameWidth, st.Name, - st.Modified, st.Untracked, st.Staged, st.Ahead) - } -} diff --git a/pkg/devops/claude.go b/pkg/devops/claude.go deleted file mode 100644 index c6b8bcb..0000000 --- a/pkg/devops/claude.go +++ /dev/null @@ -1,141 +0,0 @@ -package devops - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" -) - -// ClaudeOptions configures the Claude sandbox session. -type ClaudeOptions struct { - NoAuth bool // Don't forward any auth - Auth []string // Selective auth: "gh", "anthropic", "ssh", "git" - Model string // Model to use: opus, sonnet -} - -// Claude starts a sandboxed Claude session in the dev environment. -func (d *DevOps) Claude(ctx context.Context, projectDir string, opts ClaudeOptions) error { - // Auto-boot if not running - running, err := d.IsRunning(ctx) - if err != nil { - return err - } - if !running { - fmt.Println("Dev environment not running, booting...") - if err := d.Boot(ctx, DefaultBootOptions()); err != nil { - return fmt.Errorf("failed to boot: %w", err) - } - } - - // Mount project - if err := d.mountProject(ctx, projectDir); err != nil { - return fmt.Errorf("failed to mount project: %w", err) - } - - // Prepare environment variables to forward - envVars := []string{} - - if !opts.NoAuth { - authTypes := opts.Auth - if len(authTypes) == 0 { - authTypes = []string{"gh", "anthropic", "ssh", "git"} - } - - for _, auth := range authTypes { - switch auth { - case "anthropic": - if key := os.Getenv("ANTHROPIC_API_KEY"); key != "" { - envVars = append(envVars, "ANTHROPIC_API_KEY="+key) - } - case "git": - // Forward git config - name, _ := exec.Command("git", "config", "user.name").Output() - email, _ := exec.Command("git", "config", 
"user.email").Output() - if len(name) > 0 { - envVars = append(envVars, "GIT_AUTHOR_NAME="+strings.TrimSpace(string(name))) - envVars = append(envVars, "GIT_COMMITTER_NAME="+strings.TrimSpace(string(name))) - } - if len(email) > 0 { - envVars = append(envVars, "GIT_AUTHOR_EMAIL="+strings.TrimSpace(string(email))) - envVars = append(envVars, "GIT_COMMITTER_EMAIL="+strings.TrimSpace(string(email))) - } - } - } - } - - // Build SSH command with agent forwarding - args := []string{ - "-o", "StrictHostKeyChecking=no", - "-o", "UserKnownHostsFile=/dev/null", - "-o", "LogLevel=ERROR", - "-A", // SSH agent forwarding - "-p", "2222", - } - - args = append(args, "root@localhost") - - // Build command to run inside - claudeCmd := "cd /app && claude" - if opts.Model != "" { - claudeCmd += " --model " + opts.Model - } - args = append(args, claudeCmd) - - // Set environment for SSH - cmd := exec.CommandContext(ctx, "ssh", args...) - cmd.Stdin = os.Stdin - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - // Pass environment variables through SSH - for _, env := range envVars { - parts := strings.SplitN(env, "=", 2) - if len(parts) == 2 { - cmd.Env = append(os.Environ(), env) - } - } - - fmt.Println("Starting Claude in sandboxed environment...") - fmt.Println("Project mounted at /app") - fmt.Println("Auth forwarded: SSH agent" + formatAuthList(opts)) - fmt.Println() - - return cmd.Run() -} - -func formatAuthList(opts ClaudeOptions) string { - if opts.NoAuth { - return " (none)" - } - if len(opts.Auth) == 0 { - return ", gh, anthropic, git" - } - return ", " + strings.Join(opts.Auth, ", ") -} - -// CopyGHAuth copies GitHub CLI auth to the VM. 
-func (d *DevOps) CopyGHAuth(ctx context.Context) error { - home, err := os.UserHomeDir() - if err != nil { - return err - } - - ghConfigDir := filepath.Join(home, ".config", "gh") - if _, err := os.Stat(ghConfigDir); os.IsNotExist(err) { - return nil // No gh config to copy - } - - // Use scp to copy gh config - cmd := exec.CommandContext(ctx, "scp", - "-o", "StrictHostKeyChecking=no", - "-o", "UserKnownHostsFile=/dev/null", - "-o", "LogLevel=ERROR", - "-P", "2222", - "-r", ghConfigDir, - "root@localhost:/root/.config/", - ) - return cmd.Run() -} diff --git a/pkg/devops/claude_test.go b/pkg/devops/claude_test.go deleted file mode 100644 index 6c96b9b..0000000 --- a/pkg/devops/claude_test.go +++ /dev/null @@ -1,61 +0,0 @@ -package devops - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestClaudeOptions_Default(t *testing.T) { - opts := ClaudeOptions{} - assert.False(t, opts.NoAuth) - assert.Nil(t, opts.Auth) - assert.Empty(t, opts.Model) -} - -func TestClaudeOptions_Custom(t *testing.T) { - opts := ClaudeOptions{ - NoAuth: true, - Auth: []string{"gh", "anthropic"}, - Model: "opus", - } - assert.True(t, opts.NoAuth) - assert.Equal(t, []string{"gh", "anthropic"}, opts.Auth) - assert.Equal(t, "opus", opts.Model) -} - -func TestFormatAuthList_Good_NoAuth(t *testing.T) { - opts := ClaudeOptions{NoAuth: true} - result := formatAuthList(opts) - assert.Equal(t, " (none)", result) -} - -func TestFormatAuthList_Good_Default(t *testing.T) { - opts := ClaudeOptions{} - result := formatAuthList(opts) - assert.Equal(t, ", gh, anthropic, git", result) -} - -func TestFormatAuthList_Good_CustomAuth(t *testing.T) { - opts := ClaudeOptions{ - Auth: []string{"gh"}, - } - result := formatAuthList(opts) - assert.Equal(t, ", gh", result) -} - -func TestFormatAuthList_Good_MultipleAuth(t *testing.T) { - opts := ClaudeOptions{ - Auth: []string{"gh", "ssh", "git"}, - } - result := formatAuthList(opts) - assert.Equal(t, ", gh, ssh, git", result) -} - -func 
TestFormatAuthList_Good_EmptyAuth(t *testing.T) { - opts := ClaudeOptions{ - Auth: []string{}, - } - result := formatAuthList(opts) - assert.Equal(t, ", gh, anthropic, git", result) -} diff --git a/pkg/devops/config.go b/pkg/devops/config.go deleted file mode 100644 index 6db1e6a..0000000 --- a/pkg/devops/config.go +++ /dev/null @@ -1,86 +0,0 @@ -package devops - -import ( - "os" - "path/filepath" - - "gopkg.in/yaml.v3" -) - -// Config holds global devops configuration from ~/.core/config.yaml. -type Config struct { - Version int `yaml:"version"` - Images ImagesConfig `yaml:"images"` -} - -// ImagesConfig holds image source configuration. -type ImagesConfig struct { - Source string `yaml:"source"` // auto, github, registry, cdn - GitHub GitHubConfig `yaml:"github,omitempty"` - Registry RegistryConfig `yaml:"registry,omitempty"` - CDN CDNConfig `yaml:"cdn,omitempty"` -} - -// GitHubConfig holds GitHub Releases configuration. -type GitHubConfig struct { - Repo string `yaml:"repo"` // owner/repo format -} - -// RegistryConfig holds container registry configuration. -type RegistryConfig struct { - Image string `yaml:"image"` // e.g., ghcr.io/host-uk/core-devops -} - -// CDNConfig holds CDN/S3 configuration. -type CDNConfig struct { - URL string `yaml:"url"` // base URL for downloads -} - -// DefaultConfig returns sensible defaults. -func DefaultConfig() *Config { - return &Config{ - Version: 1, - Images: ImagesConfig{ - Source: "auto", - GitHub: GitHubConfig{ - Repo: "host-uk/core-images", - }, - Registry: RegistryConfig{ - Image: "ghcr.io/host-uk/core-devops", - }, - }, - } -} - -// ConfigPath returns the path to the config file. -func ConfigPath() (string, error) { - home, err := os.UserHomeDir() - if err != nil { - return "", err - } - return filepath.Join(home, ".core", "config.yaml"), nil -} - -// LoadConfig loads configuration from ~/.core/config.yaml. -// Returns default config if file doesn't exist. 
-func LoadConfig() (*Config, error) { - configPath, err := ConfigPath() - if err != nil { - return DefaultConfig(), nil - } - - data, err := os.ReadFile(configPath) - if err != nil { - if os.IsNotExist(err) { - return DefaultConfig(), nil - } - return nil, err - } - - cfg := DefaultConfig() - if err := yaml.Unmarshal(data, cfg); err != nil { - return nil, err - } - - return cfg, nil -} diff --git a/pkg/devops/config_test.go b/pkg/devops/config_test.go deleted file mode 100644 index 7f1c1f5..0000000 --- a/pkg/devops/config_test.go +++ /dev/null @@ -1,254 +0,0 @@ -package devops - -import ( - "os" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestDefaultConfig(t *testing.T) { - cfg := DefaultConfig() - assert.Equal(t, 1, cfg.Version) - assert.Equal(t, "auto", cfg.Images.Source) - assert.Equal(t, "host-uk/core-images", cfg.Images.GitHub.Repo) -} - -func TestConfigPath(t *testing.T) { - path, err := ConfigPath() - assert.NoError(t, err) - assert.Contains(t, path, ".core/config.yaml") -} - -func TestLoadConfig_Good(t *testing.T) { - t.Run("returns default if not exists", func(t *testing.T) { - // Mock HOME to a temp dir - tempHome := t.TempDir() - origHome := os.Getenv("HOME") - t.Setenv("HOME", tempHome) - defer os.Setenv("HOME", origHome) - - cfg, err := LoadConfig() - assert.NoError(t, err) - assert.Equal(t, DefaultConfig(), cfg) - }) - - t.Run("loads existing config", func(t *testing.T) { - tempHome := t.TempDir() - t.Setenv("HOME", tempHome) - - coreDir := filepath.Join(tempHome, ".core") - err := os.MkdirAll(coreDir, 0755) - require.NoError(t, err) - - configData := ` -version: 2 -images: - source: cdn - cdn: - url: https://cdn.example.com -` - err = os.WriteFile(filepath.Join(coreDir, "config.yaml"), []byte(configData), 0644) - require.NoError(t, err) - - cfg, err := LoadConfig() - assert.NoError(t, err) - assert.Equal(t, 2, cfg.Version) - assert.Equal(t, "cdn", cfg.Images.Source) - 
assert.Equal(t, "https://cdn.example.com", cfg.Images.CDN.URL) - }) -} - -func TestLoadConfig_Bad(t *testing.T) { - t.Run("invalid yaml", func(t *testing.T) { - tempHome := t.TempDir() - t.Setenv("HOME", tempHome) - - coreDir := filepath.Join(tempHome, ".core") - err := os.MkdirAll(coreDir, 0755) - require.NoError(t, err) - - err = os.WriteFile(filepath.Join(coreDir, "config.yaml"), []byte("invalid: yaml: :"), 0644) - require.NoError(t, err) - - _, err = LoadConfig() - assert.Error(t, err) - }) -} - -func TestConfig_Struct(t *testing.T) { - cfg := &Config{ - Version: 2, - Images: ImagesConfig{ - Source: "github", - GitHub: GitHubConfig{ - Repo: "owner/repo", - }, - Registry: RegistryConfig{ - Image: "ghcr.io/owner/image", - }, - CDN: CDNConfig{ - URL: "https://cdn.example.com", - }, - }, - } - assert.Equal(t, 2, cfg.Version) - assert.Equal(t, "github", cfg.Images.Source) - assert.Equal(t, "owner/repo", cfg.Images.GitHub.Repo) - assert.Equal(t, "ghcr.io/owner/image", cfg.Images.Registry.Image) - assert.Equal(t, "https://cdn.example.com", cfg.Images.CDN.URL) -} - -func TestDefaultConfig_Complete(t *testing.T) { - cfg := DefaultConfig() - assert.Equal(t, 1, cfg.Version) - assert.Equal(t, "auto", cfg.Images.Source) - assert.Equal(t, "host-uk/core-images", cfg.Images.GitHub.Repo) - assert.Equal(t, "ghcr.io/host-uk/core-devops", cfg.Images.Registry.Image) - assert.Empty(t, cfg.Images.CDN.URL) -} - -func TestLoadConfig_Good_PartialConfig(t *testing.T) { - tempHome := t.TempDir() - t.Setenv("HOME", tempHome) - - coreDir := filepath.Join(tempHome, ".core") - err := os.MkdirAll(coreDir, 0755) - require.NoError(t, err) - - // Config only specifies source, should merge with defaults - configData := ` -version: 1 -images: - source: github -` - err = os.WriteFile(filepath.Join(coreDir, "config.yaml"), []byte(configData), 0644) - require.NoError(t, err) - - cfg, err := LoadConfig() - assert.NoError(t, err) - assert.Equal(t, 1, cfg.Version) - assert.Equal(t, "github", 
cfg.Images.Source) - // Default values should be preserved - assert.Equal(t, "host-uk/core-images", cfg.Images.GitHub.Repo) -} - -func TestLoadConfig_Good_AllSourceTypes(t *testing.T) { - tests := []struct { - name string - config string - check func(*testing.T, *Config) - }{ - { - name: "github source", - config: ` -version: 1 -images: - source: github - github: - repo: custom/repo -`, - check: func(t *testing.T, cfg *Config) { - assert.Equal(t, "github", cfg.Images.Source) - assert.Equal(t, "custom/repo", cfg.Images.GitHub.Repo) - }, - }, - { - name: "cdn source", - config: ` -version: 1 -images: - source: cdn - cdn: - url: https://custom-cdn.com -`, - check: func(t *testing.T, cfg *Config) { - assert.Equal(t, "cdn", cfg.Images.Source) - assert.Equal(t, "https://custom-cdn.com", cfg.Images.CDN.URL) - }, - }, - { - name: "registry source", - config: ` -version: 1 -images: - source: registry - registry: - image: docker.io/custom/image -`, - check: func(t *testing.T, cfg *Config) { - assert.Equal(t, "registry", cfg.Images.Source) - assert.Equal(t, "docker.io/custom/image", cfg.Images.Registry.Image) - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - tempHome := t.TempDir() - t.Setenv("HOME", tempHome) - - coreDir := filepath.Join(tempHome, ".core") - err := os.MkdirAll(coreDir, 0755) - require.NoError(t, err) - - err = os.WriteFile(filepath.Join(coreDir, "config.yaml"), []byte(tt.config), 0644) - require.NoError(t, err) - - cfg, err := LoadConfig() - assert.NoError(t, err) - tt.check(t, cfg) - }) - } -} - -func TestImagesConfig_Struct(t *testing.T) { - ic := ImagesConfig{ - Source: "auto", - GitHub: GitHubConfig{Repo: "test/repo"}, - } - assert.Equal(t, "auto", ic.Source) - assert.Equal(t, "test/repo", ic.GitHub.Repo) -} - -func TestGitHubConfig_Struct(t *testing.T) { - gc := GitHubConfig{Repo: "owner/repo"} - assert.Equal(t, "owner/repo", gc.Repo) -} - -func TestRegistryConfig_Struct(t *testing.T) { - rc := RegistryConfig{Image: 
"ghcr.io/owner/image:latest"} - assert.Equal(t, "ghcr.io/owner/image:latest", rc.Image) -} - -func TestCDNConfig_Struct(t *testing.T) { - cc := CDNConfig{URL: "https://cdn.example.com/images"} - assert.Equal(t, "https://cdn.example.com/images", cc.URL) -} - -func TestLoadConfig_Bad_UnreadableFile(t *testing.T) { - // This test is platform-specific and may not work on all systems - // Skip if we can't test file permissions properly - if os.Getuid() == 0 { - t.Skip("Skipping permission test when running as root") - } - - tempHome := t.TempDir() - t.Setenv("HOME", tempHome) - - coreDir := filepath.Join(tempHome, ".core") - err := os.MkdirAll(coreDir, 0755) - require.NoError(t, err) - - configPath := filepath.Join(coreDir, "config.yaml") - err = os.WriteFile(configPath, []byte("version: 1"), 0000) - require.NoError(t, err) - - _, err = LoadConfig() - assert.Error(t, err) - - // Restore permissions so cleanup works - os.Chmod(configPath, 0644) -} \ No newline at end of file diff --git a/pkg/devops/devops.go b/pkg/devops/devops.go deleted file mode 100644 index 9ccffd3..0000000 --- a/pkg/devops/devops.go +++ /dev/null @@ -1,216 +0,0 @@ -// Package devops provides a portable development environment using LinuxKit images. -package devops - -import ( - "context" - "fmt" - "os" - "path/filepath" - "runtime" - "time" - - "github.com/host-uk/core/pkg/container" -) - -// DevOps manages the portable development environment. -type DevOps struct { - config *Config - images *ImageManager - container *container.LinuxKitManager -} - -// New creates a new DevOps instance. 
-func New() (*DevOps, error) { - cfg, err := LoadConfig() - if err != nil { - return nil, fmt.Errorf("devops.New: failed to load config: %w", err) - } - - images, err := NewImageManager(cfg) - if err != nil { - return nil, fmt.Errorf("devops.New: failed to create image manager: %w", err) - } - - mgr, err := container.NewLinuxKitManager() - if err != nil { - return nil, fmt.Errorf("devops.New: failed to create container manager: %w", err) - } - - return &DevOps{ - config: cfg, - images: images, - container: mgr, - }, nil -} - -// ImageName returns the platform-specific image name. -func ImageName() string { - return fmt.Sprintf("core-devops-%s-%s.qcow2", runtime.GOOS, runtime.GOARCH) -} - -// ImagesDir returns the path to the images directory. -func ImagesDir() (string, error) { - if dir := os.Getenv("CORE_IMAGES_DIR"); dir != "" { - return dir, nil - } - home, err := os.UserHomeDir() - if err != nil { - return "", err - } - return filepath.Join(home, ".core", "images"), nil -} - -// ImagePath returns the full path to the platform-specific image. -func ImagePath() (string, error) { - dir, err := ImagesDir() - if err != nil { - return "", err - } - return filepath.Join(dir, ImageName()), nil -} - -// IsInstalled checks if the dev image is installed. -func (d *DevOps) IsInstalled() bool { - path, err := ImagePath() - if err != nil { - return false - } - _, err = os.Stat(path) - return err == nil -} - -// Install downloads and installs the dev image. -func (d *DevOps) Install(ctx context.Context, progress func(downloaded, total int64)) error { - return d.images.Install(ctx, progress) -} - -// CheckUpdate checks if an update is available. -func (d *DevOps) CheckUpdate(ctx context.Context) (current, latest string, hasUpdate bool, err error) { - return d.images.CheckUpdate(ctx) -} - -// BootOptions configures how to boot the dev environment. 
-type BootOptions struct { - Memory int // MB, default 4096 - CPUs int // default 2 - Name string // container name - Fresh bool // destroy existing and start fresh -} - -// DefaultBootOptions returns sensible defaults. -func DefaultBootOptions() BootOptions { - return BootOptions{ - Memory: 4096, - CPUs: 2, - Name: "core-dev", - } -} - -// Boot starts the dev environment. -func (d *DevOps) Boot(ctx context.Context, opts BootOptions) error { - if !d.images.IsInstalled() { - return fmt.Errorf("dev image not installed (run 'core dev install' first)") - } - - // Check if already running - if !opts.Fresh { - running, err := d.IsRunning(ctx) - if err == nil && running { - return fmt.Errorf("dev environment already running (use 'core dev stop' first or --fresh)") - } - } - - // Stop existing if fresh - if opts.Fresh { - _ = d.Stop(ctx) - } - - imagePath, err := ImagePath() - if err != nil { - return err - } - - // Build run options for LinuxKitManager - runOpts := container.RunOptions{ - Name: opts.Name, - Memory: opts.Memory, - CPUs: opts.CPUs, - SSHPort: 2222, - Detach: true, - } - - _, err = d.container.Run(ctx, imagePath, runOpts) - return err -} - -// Stop stops the dev environment. -func (d *DevOps) Stop(ctx context.Context) error { - c, err := d.findContainer(ctx, "core-dev") - if err != nil { - return err - } - if c == nil { - return fmt.Errorf("dev environment not found") - } - return d.container.Stop(ctx, c.ID) -} - -// IsRunning checks if the dev environment is running. -func (d *DevOps) IsRunning(ctx context.Context) (bool, error) { - c, err := d.findContainer(ctx, "core-dev") - if err != nil { - return false, err - } - return c != nil && c.Status == container.StatusRunning, nil -} - -// findContainer finds a container by name. 
-func (d *DevOps) findContainer(ctx context.Context, name string) (*container.Container, error) { - containers, err := d.container.List(ctx) - if err != nil { - return nil, err - } - for _, c := range containers { - if c.Name == name { - return c, nil - } - } - return nil, nil -} - -// DevStatus returns information about the dev environment. -type DevStatus struct { - Installed bool - Running bool - ImageVersion string - ContainerID string - Memory int - CPUs int - SSHPort int - Uptime time.Duration -} - -// Status returns the current dev environment status. -func (d *DevOps) Status(ctx context.Context) (*DevStatus, error) { - status := &DevStatus{ - Installed: d.images.IsInstalled(), - SSHPort: 2222, - } - - if info, ok := d.images.manifest.Images[ImageName()]; ok { - status.ImageVersion = info.Version - } - - c, _ := d.findContainer(ctx, "core-dev") - if c != nil { - status.Running = c.Status == container.StatusRunning - status.ContainerID = c.ID - status.Memory = c.Memory - status.CPUs = c.CPUs - if status.Running { - status.Uptime = time.Since(c.StartedAt) - } - } - - return status, nil -} diff --git a/pkg/devops/devops_test.go b/pkg/devops/devops_test.go deleted file mode 100644 index b305d38..0000000 --- a/pkg/devops/devops_test.go +++ /dev/null @@ -1,823 +0,0 @@ -package devops - -import ( - "context" - "os" - "os/exec" - "path/filepath" - "runtime" - "testing" - "time" - - "github.com/host-uk/core/pkg/container" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestImageName(t *testing.T) { - name := ImageName() - assert.Contains(t, name, "core-devops-") - assert.Contains(t, name, runtime.GOOS) - assert.Contains(t, name, runtime.GOARCH) - assert.True(t, (name[len(name)-6:] == ".qcow2")) -} - -func TestImagesDir(t *testing.T) { - t.Run("default directory", func(t *testing.T) { - // Unset env if it exists - orig := os.Getenv("CORE_IMAGES_DIR") - os.Unsetenv("CORE_IMAGES_DIR") - defer os.Setenv("CORE_IMAGES_DIR", orig) - 
- dir, err := ImagesDir() - assert.NoError(t, err) - assert.Contains(t, dir, ".core/images") - }) - - t.Run("environment override", func(t *testing.T) { - customDir := "/tmp/custom-images" - t.Setenv("CORE_IMAGES_DIR", customDir) - - dir, err := ImagesDir() - assert.NoError(t, err) - assert.Equal(t, customDir, dir) - }) -} - -func TestImagePath(t *testing.T) { - customDir := "/tmp/images" - t.Setenv("CORE_IMAGES_DIR", customDir) - - path, err := ImagePath() - assert.NoError(t, err) - expected := filepath.Join(customDir, ImageName()) - assert.Equal(t, expected, path) -} - -func TestDefaultBootOptions(t *testing.T) { - opts := DefaultBootOptions() - assert.Equal(t, 4096, opts.Memory) - assert.Equal(t, 2, opts.CPUs) - assert.Equal(t, "core-dev", opts.Name) - assert.False(t, opts.Fresh) -} - -func TestIsInstalled_Bad(t *testing.T) { - t.Run("returns false for non-existent image", func(t *testing.T) { - // Point to a temp directory that is empty - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - // Create devops instance manually to avoid loading real config/images - d := &DevOps{} - assert.False(t, d.IsInstalled()) - }) -} - -func TestIsInstalled_Good(t *testing.T) { - t.Run("returns true when image exists", func(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - // Create the image file - imagePath := filepath.Join(tempDir, ImageName()) - err := os.WriteFile(imagePath, []byte("fake image data"), 0644) - require.NoError(t, err) - - d := &DevOps{} - assert.True(t, d.IsInstalled()) - }) -} - -type mockHypervisor struct{} - -func (m *mockHypervisor) Name() string { return "mock" } -func (m *mockHypervisor) Available() bool { return true } -func (m *mockHypervisor) BuildCommand(ctx context.Context, image string, opts *container.HypervisorOptions) (*exec.Cmd, error) { - return exec.Command("true"), nil -} - -func TestDevOps_Status_Good(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - cfg := 
DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - // Setup mock container manager - statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - // Add a fake running container - c := &container.Container{ - ID: "test-id", - Name: "core-dev", - Status: container.StatusRunning, - PID: os.Getpid(), // Use our own PID so isProcessRunning returns true - StartedAt: time.Now().Add(-time.Hour), - Memory: 2048, - CPUs: 4, - } - err = state.Add(c) - require.NoError(t, err) - - status, err := d.Status(context.Background()) - assert.NoError(t, err) - assert.NotNil(t, status) - assert.True(t, status.Running) - assert.Equal(t, "test-id", status.ContainerID) - assert.Equal(t, 2048, status.Memory) - assert.Equal(t, 4, status.CPUs) -} - -func TestDevOps_Status_Good_NotInstalled(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - status, err := d.Status(context.Background()) - assert.NoError(t, err) - assert.NotNil(t, status) - assert.False(t, status.Installed) - assert.False(t, status.Running) - assert.Equal(t, 2222, status.SSHPort) -} - -func TestDevOps_Status_Good_NoContainer(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - // Create fake image to mark as installed - imagePath := filepath.Join(tempDir, ImageName()) - err := os.WriteFile(imagePath, []byte("fake"), 0644) - require.NoError(t, err) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - 
statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - status, err := d.Status(context.Background()) - assert.NoError(t, err) - assert.NotNil(t, status) - assert.True(t, status.Installed) - assert.False(t, status.Running) - assert.Empty(t, status.ContainerID) -} - -func TestDevOps_IsRunning_Good(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - c := &container.Container{ - ID: "test-id", - Name: "core-dev", - Status: container.StatusRunning, - PID: os.Getpid(), - StartedAt: time.Now(), - } - err = state.Add(c) - require.NoError(t, err) - - running, err := d.IsRunning(context.Background()) - assert.NoError(t, err) - assert.True(t, running) -} - -func TestDevOps_IsRunning_Bad_NotRunning(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - running, err := d.IsRunning(context.Background()) - assert.NoError(t, err) - assert.False(t, running) -} - -func TestDevOps_IsRunning_Bad_ContainerStopped(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - statePath := 
filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - c := &container.Container{ - ID: "test-id", - Name: "core-dev", - Status: container.StatusStopped, - PID: 12345, - StartedAt: time.Now(), - } - err = state.Add(c) - require.NoError(t, err) - - running, err := d.IsRunning(context.Background()) - assert.NoError(t, err) - assert.False(t, running) -} - -func TestDevOps_findContainer_Good(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - c := &container.Container{ - ID: "test-id", - Name: "my-container", - Status: container.StatusRunning, - PID: os.Getpid(), - StartedAt: time.Now(), - } - err = state.Add(c) - require.NoError(t, err) - - found, err := d.findContainer(context.Background(), "my-container") - assert.NoError(t, err) - assert.NotNil(t, found) - assert.Equal(t, "test-id", found.ID) - assert.Equal(t, "my-container", found.Name) -} - -func TestDevOps_findContainer_Bad_NotFound(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - found, err := d.findContainer(context.Background(), "nonexistent") - assert.NoError(t, err) - assert.Nil(t, found) -} - -func TestDevOps_Stop_Bad_NotFound(t 
*testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - err = d.Stop(context.Background()) - assert.Error(t, err) - assert.Contains(t, err.Error(), "not found") -} - -func TestBootOptions_Custom(t *testing.T) { - opts := BootOptions{ - Memory: 8192, - CPUs: 4, - Name: "custom-dev", - Fresh: true, - } - assert.Equal(t, 8192, opts.Memory) - assert.Equal(t, 4, opts.CPUs) - assert.Equal(t, "custom-dev", opts.Name) - assert.True(t, opts.Fresh) -} - -func TestDevStatus_Struct(t *testing.T) { - status := DevStatus{ - Installed: true, - Running: true, - ImageVersion: "v1.2.3", - ContainerID: "abc123", - Memory: 4096, - CPUs: 2, - SSHPort: 2222, - Uptime: time.Hour, - } - assert.True(t, status.Installed) - assert.True(t, status.Running) - assert.Equal(t, "v1.2.3", status.ImageVersion) - assert.Equal(t, "abc123", status.ContainerID) - assert.Equal(t, 4096, status.Memory) - assert.Equal(t, 2, status.CPUs) - assert.Equal(t, 2222, status.SSHPort) - assert.Equal(t, time.Hour, status.Uptime) -} - -func TestDevOps_Boot_Bad_NotInstalled(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - err = d.Boot(context.Background(), DefaultBootOptions()) - assert.Error(t, err) - assert.Contains(t, err.Error(), "not installed") -} - -func TestDevOps_Boot_Bad_AlreadyRunning(t *testing.T) { - 
tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - // Create fake image - imagePath := filepath.Join(tempDir, ImageName()) - err := os.WriteFile(imagePath, []byte("fake"), 0644) - require.NoError(t, err) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - // Add a running container - c := &container.Container{ - ID: "test-id", - Name: "core-dev", - Status: container.StatusRunning, - PID: os.Getpid(), - StartedAt: time.Now(), - } - err = state.Add(c) - require.NoError(t, err) - - err = d.Boot(context.Background(), DefaultBootOptions()) - assert.Error(t, err) - assert.Contains(t, err.Error(), "already running") -} - -func TestDevOps_Status_Good_WithImageVersion(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - // Create fake image - imagePath := filepath.Join(tempDir, ImageName()) - err := os.WriteFile(imagePath, []byte("fake"), 0644) - require.NoError(t, err) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - // Manually set manifest with version info - mgr.manifest.Images[ImageName()] = ImageInfo{ - Version: "v1.2.3", - Source: "test", - } - - statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - config: cfg, - images: mgr, - container: cm, - } - - status, err := d.Status(context.Background()) - assert.NoError(t, err) - assert.True(t, status.Installed) - assert.Equal(t, "v1.2.3", status.ImageVersion) -} - -func TestDevOps_findContainer_Good_MultipleContainers(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - cfg := 
DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - // Add multiple containers - c1 := &container.Container{ - ID: "id-1", - Name: "container-1", - Status: container.StatusRunning, - PID: os.Getpid(), - StartedAt: time.Now(), - } - c2 := &container.Container{ - ID: "id-2", - Name: "container-2", - Status: container.StatusRunning, - PID: os.Getpid(), - StartedAt: time.Now(), - } - err = state.Add(c1) - require.NoError(t, err) - err = state.Add(c2) - require.NoError(t, err) - - // Find specific container - found, err := d.findContainer(context.Background(), "container-2") - assert.NoError(t, err) - assert.NotNil(t, found) - assert.Equal(t, "id-2", found.ID) -} - -func TestDevOps_Status_Good_ContainerWithUptime(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - startTime := time.Now().Add(-2 * time.Hour) - c := &container.Container{ - ID: "test-id", - Name: "core-dev", - Status: container.StatusRunning, - PID: os.Getpid(), - StartedAt: startTime, - Memory: 4096, - CPUs: 2, - } - err = state.Add(c) - require.NoError(t, err) - - status, err := d.Status(context.Background()) - assert.NoError(t, err) - assert.True(t, status.Running) - assert.GreaterOrEqual(t, status.Uptime.Hours(), float64(1)) -} - -func TestDevOps_IsRunning_Bad_DifferentContainerName(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - cfg := 
DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - // Add a container with different name - c := &container.Container{ - ID: "test-id", - Name: "other-container", - Status: container.StatusRunning, - PID: os.Getpid(), - StartedAt: time.Now(), - } - err = state.Add(c) - require.NoError(t, err) - - // IsRunning looks for "core-dev", not "other-container" - running, err := d.IsRunning(context.Background()) - assert.NoError(t, err) - assert.False(t, running) -} - -func TestDevOps_Boot_Good_FreshFlag(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - // Create fake image - imagePath := filepath.Join(tempDir, ImageName()) - err := os.WriteFile(imagePath, []byte("fake"), 0644) - require.NoError(t, err) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - // Add an existing container with non-existent PID (will be seen as stopped) - c := &container.Container{ - ID: "old-id", - Name: "core-dev", - Status: container.StatusRunning, - PID: 99999999, // Non-existent PID - List() will mark it as stopped - StartedAt: time.Now(), - } - err = state.Add(c) - require.NoError(t, err) - - // Boot with Fresh=true should try to stop the existing container - // then run a new one. 
The mock hypervisor "succeeds" so this won't error - opts := BootOptions{ - Memory: 4096, - CPUs: 2, - Name: "core-dev", - Fresh: true, - } - err = d.Boot(context.Background(), opts) - // The mock hypervisor's Run succeeds - assert.NoError(t, err) -} - -func TestDevOps_Stop_Bad_ContainerNotRunning(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - // Add a container that's already stopped - c := &container.Container{ - ID: "test-id", - Name: "core-dev", - Status: container.StatusStopped, - PID: 99999999, - StartedAt: time.Now(), - } - err = state.Add(c) - require.NoError(t, err) - - // Stop should fail because container is not running - err = d.Stop(context.Background()) - assert.Error(t, err) - assert.Contains(t, err.Error(), "not running") -} - -func TestDevOps_Boot_Good_FreshWithNoExisting(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - // Create fake image - imagePath := filepath.Join(tempDir, ImageName()) - err := os.WriteFile(imagePath, []byte("fake"), 0644) - require.NoError(t, err) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - // Boot with Fresh=true but no existing container - opts := BootOptions{ - Memory: 4096, - CPUs: 2, - Name: "core-dev", - Fresh: true, - } - err = d.Boot(context.Background(), opts) - // The mock hypervisor succeeds - assert.NoError(t, err) -} - -func 
TestImageName_Format(t *testing.T) { - name := ImageName() - // Check format: core-devops-{os}-{arch}.qcow2 - assert.Contains(t, name, "core-devops-") - assert.Contains(t, name, runtime.GOOS) - assert.Contains(t, name, runtime.GOARCH) - assert.True(t, filepath.Ext(name) == ".qcow2") -} - -func TestDevOps_Install_Delegates(t *testing.T) { - // This test verifies the Install method delegates to ImageManager - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - d := &DevOps{ - images: mgr, - } - - // This will fail because no source is available, but it tests delegation - err = d.Install(context.Background(), nil) - assert.Error(t, err) -} - -func TestDevOps_CheckUpdate_Delegates(t *testing.T) { - // This test verifies the CheckUpdate method delegates to ImageManager - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - d := &DevOps{ - images: mgr, - } - - // This will fail because image not installed, but it tests delegation - _, _, _, err = d.CheckUpdate(context.Background()) - assert.Error(t, err) -} - -func TestDevOps_Boot_Good_Success(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - // Create fake image - imagePath := filepath.Join(tempDir, ImageName()) - err := os.WriteFile(imagePath, []byte("fake"), 0644) - require.NoError(t, err) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - statePath := filepath.Join(tempDir, "containers.json") - state := container.NewState(statePath) - h := &mockHypervisor{} - cm := container.NewLinuxKitManagerWithHypervisor(state, h) - - d := &DevOps{ - images: mgr, - container: cm, - } - - // Boot without Fresh flag and no existing container - opts := DefaultBootOptions() - err = d.Boot(context.Background(), opts) - assert.NoError(t, err) // Mock hypervisor 
succeeds -} - -func TestDevOps_Config(t *testing.T) { - tempDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tempDir) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - d := &DevOps{ - config: cfg, - images: mgr, - } - - assert.NotNil(t, d.config) - assert.Equal(t, "auto", d.config.Images.Source) -} diff --git a/pkg/devops/images.go b/pkg/devops/images.go deleted file mode 100644 index 2fee280..0000000 --- a/pkg/devops/images.go +++ /dev/null @@ -1,193 +0,0 @@ -package devops - -import ( - "context" - "encoding/json" - "fmt" - "os" - "path/filepath" - "time" - - "github.com/host-uk/core/pkg/devops/sources" -) - -// ImageManager handles image downloads and updates. -type ImageManager struct { - config *Config - manifest *Manifest - sources []sources.ImageSource -} - -// Manifest tracks installed images. -type Manifest struct { - Images map[string]ImageInfo `json:"images"` - path string -} - -// ImageInfo holds metadata about an installed image. -type ImageInfo struct { - Version string `json:"version"` - SHA256 string `json:"sha256,omitempty"` - Downloaded time.Time `json:"downloaded"` - Source string `json:"source"` -} - -// NewImageManager creates a new image manager. 
-func NewImageManager(cfg *Config) (*ImageManager, error) { - imagesDir, err := ImagesDir() - if err != nil { - return nil, err - } - - // Ensure images directory exists - if err := os.MkdirAll(imagesDir, 0755); err != nil { - return nil, err - } - - // Load or create manifest - manifestPath := filepath.Join(imagesDir, "manifest.json") - manifest, err := loadManifest(manifestPath) - if err != nil { - return nil, err - } - - // Build source list based on config - imageName := ImageName() - sourceCfg := sources.SourceConfig{ - GitHubRepo: cfg.Images.GitHub.Repo, - RegistryImage: cfg.Images.Registry.Image, - CDNURL: cfg.Images.CDN.URL, - ImageName: imageName, - } - - var srcs []sources.ImageSource - switch cfg.Images.Source { - case "github": - srcs = []sources.ImageSource{sources.NewGitHubSource(sourceCfg)} - case "cdn": - srcs = []sources.ImageSource{sources.NewCDNSource(sourceCfg)} - default: // "auto" - srcs = []sources.ImageSource{ - sources.NewGitHubSource(sourceCfg), - sources.NewCDNSource(sourceCfg), - } - } - - return &ImageManager{ - config: cfg, - manifest: manifest, - sources: srcs, - }, nil -} - -// IsInstalled checks if the dev image is installed. -func (m *ImageManager) IsInstalled() bool { - path, err := ImagePath() - if err != nil { - return false - } - _, err = os.Stat(path) - return err == nil -} - -// Install downloads and installs the dev image. 
-func (m *ImageManager) Install(ctx context.Context, progress func(downloaded, total int64)) error { - imagesDir, err := ImagesDir() - if err != nil { - return err - } - - // Find first available source - var src sources.ImageSource - for _, s := range m.sources { - if s.Available() { - src = s - break - } - } - if src == nil { - return fmt.Errorf("no image source available") - } - - // Get version - version, err := src.LatestVersion(ctx) - if err != nil { - return fmt.Errorf("failed to get latest version: %w", err) - } - - fmt.Printf("Downloading %s from %s...\n", ImageName(), src.Name()) - - // Download - if err := src.Download(ctx, imagesDir, progress); err != nil { - return err - } - - // Update manifest - m.manifest.Images[ImageName()] = ImageInfo{ - Version: version, - Downloaded: time.Now(), - Source: src.Name(), - } - - return m.manifest.Save() -} - -// CheckUpdate checks if an update is available. -func (m *ImageManager) CheckUpdate(ctx context.Context) (current, latest string, hasUpdate bool, err error) { - info, ok := m.manifest.Images[ImageName()] - if !ok { - return "", "", false, fmt.Errorf("image not installed") - } - current = info.Version - - // Find first available source - var src sources.ImageSource - for _, s := range m.sources { - if s.Available() { - src = s - break - } - } - if src == nil { - return current, "", false, fmt.Errorf("no image source available") - } - - latest, err = src.LatestVersion(ctx) - if err != nil { - return current, "", false, err - } - - hasUpdate = current != latest - return current, latest, hasUpdate, nil -} - -func loadManifest(path string) (*Manifest, error) { - m := &Manifest{ - Images: make(map[string]ImageInfo), - path: path, - } - - data, err := os.ReadFile(path) - if err != nil { - if os.IsNotExist(err) { - return m, nil - } - return nil, err - } - - if err := json.Unmarshal(data, m); err != nil { - return nil, err - } - m.path = path - - return m, nil -} - -// Save writes the manifest to disk. 
-func (m *Manifest) Save() error { - data, err := json.MarshalIndent(m, "", " ") - if err != nil { - return err - } - return os.WriteFile(m.path, data, 0644) -} diff --git a/pkg/devops/images_test.go b/pkg/devops/images_test.go deleted file mode 100644 index b186e54..0000000 --- a/pkg/devops/images_test.go +++ /dev/null @@ -1,558 +0,0 @@ -package devops - -import ( - "context" - "os" - "path/filepath" - "testing" - "time" - - "github.com/host-uk/core/pkg/devops/sources" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestImageManager_Good_IsInstalled(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - // Not installed yet - assert.False(t, mgr.IsInstalled()) - - // Create fake image - imagePath := filepath.Join(tmpDir, ImageName()) - err = os.WriteFile(imagePath, []byte("fake"), 0644) - require.NoError(t, err) - - // Now installed - assert.True(t, mgr.IsInstalled()) -} - -func TestNewImageManager_Good(t *testing.T) { - t.Run("creates manager with cdn source", func(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - cfg := DefaultConfig() - cfg.Images.Source = "cdn" - - mgr, err := NewImageManager(cfg) - assert.NoError(t, err) - assert.NotNil(t, mgr) - assert.Len(t, mgr.sources, 1) - assert.Equal(t, "cdn", mgr.sources[0].Name()) - }) - - t.Run("creates manager with github source", func(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - cfg := DefaultConfig() - cfg.Images.Source = "github" - - mgr, err := NewImageManager(cfg) - assert.NoError(t, err) - assert.NotNil(t, mgr) - assert.Len(t, mgr.sources, 1) - assert.Equal(t, "github", mgr.sources[0].Name()) - }) -} - -func TestManifest_Save(t *testing.T) { - tmpDir := t.TempDir() - path := filepath.Join(tmpDir, "manifest.json") - - m := &Manifest{ - Images: make(map[string]ImageInfo), - path: path, - } 
- - m.Images["test.img"] = ImageInfo{ - Version: "1.0.0", - Source: "test", - } - - err := m.Save() - assert.NoError(t, err) - - // Verify file exists and has content - _, err = os.Stat(path) - assert.NoError(t, err) - - // Reload - m2, err := loadManifest(path) - assert.NoError(t, err) - assert.Equal(t, "1.0.0", m2.Images["test.img"].Version) -} - -func TestLoadManifest_Bad(t *testing.T) { - t.Run("invalid json", func(t *testing.T) { - tmpDir := t.TempDir() - path := filepath.Join(tmpDir, "manifest.json") - err := os.WriteFile(path, []byte("invalid json"), 0644) - require.NoError(t, err) - - _, err = loadManifest(path) - assert.Error(t, err) - }) -} - -func TestCheckUpdate_Bad(t *testing.T) { - t.Run("image not installed", func(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - require.NoError(t, err) - - _, _, _, err = mgr.CheckUpdate(context.Background()) - assert.Error(t, err) - assert.Contains(t, err.Error(), "image not installed") - }) -} - -func TestNewImageManager_Good_AutoSource(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - cfg := DefaultConfig() - cfg.Images.Source = "auto" - - mgr, err := NewImageManager(cfg) - assert.NoError(t, err) - assert.NotNil(t, mgr) - assert.Len(t, mgr.sources, 2) // github and cdn -} - -func TestNewImageManager_Good_UnknownSourceFallsToAuto(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - cfg := DefaultConfig() - cfg.Images.Source = "unknown" - - mgr, err := NewImageManager(cfg) - assert.NoError(t, err) - assert.NotNil(t, mgr) - assert.Len(t, mgr.sources, 2) // falls to default (auto) which is github + cdn -} - -func TestLoadManifest_Good_Empty(t *testing.T) { - tmpDir := t.TempDir() - path := filepath.Join(tmpDir, "nonexistent.json") - - m, err := loadManifest(path) - assert.NoError(t, err) - assert.NotNil(t, m) - assert.NotNil(t, m.Images) - assert.Empty(t, m.Images) - 
assert.Equal(t, path, m.path) -} - -func TestLoadManifest_Good_ExistingData(t *testing.T) { - tmpDir := t.TempDir() - path := filepath.Join(tmpDir, "manifest.json") - - data := `{"images":{"test.img":{"version":"2.0.0","source":"cdn"}}}` - err := os.WriteFile(path, []byte(data), 0644) - require.NoError(t, err) - - m, err := loadManifest(path) - assert.NoError(t, err) - assert.NotNil(t, m) - assert.Equal(t, "2.0.0", m.Images["test.img"].Version) - assert.Equal(t, "cdn", m.Images["test.img"].Source) -} - -func TestImageInfo_Struct(t *testing.T) { - info := ImageInfo{ - Version: "1.0.0", - SHA256: "abc123", - Downloaded: time.Now(), - Source: "github", - } - assert.Equal(t, "1.0.0", info.Version) - assert.Equal(t, "abc123", info.SHA256) - assert.False(t, info.Downloaded.IsZero()) - assert.Equal(t, "github", info.Source) -} - -func TestManifest_Save_Good_CreatesDirs(t *testing.T) { - tmpDir := t.TempDir() - nestedPath := filepath.Join(tmpDir, "nested", "dir", "manifest.json") - - m := &Manifest{ - Images: make(map[string]ImageInfo), - path: nestedPath, - } - m.Images["test.img"] = ImageInfo{Version: "1.0.0"} - - // Should fail because nested directories don't exist - // (Save doesn't create parent directories, it just writes to path) - err := m.Save() - assert.Error(t, err) -} - -func TestManifest_Save_Good_Overwrite(t *testing.T) { - tmpDir := t.TempDir() - path := filepath.Join(tmpDir, "manifest.json") - - // First save - m1 := &Manifest{ - Images: make(map[string]ImageInfo), - path: path, - } - m1.Images["test.img"] = ImageInfo{Version: "1.0.0"} - err := m1.Save() - require.NoError(t, err) - - // Second save with different data - m2 := &Manifest{ - Images: make(map[string]ImageInfo), - path: path, - } - m2.Images["other.img"] = ImageInfo{Version: "2.0.0"} - err = m2.Save() - require.NoError(t, err) - - // Verify second data - loaded, err := loadManifest(path) - assert.NoError(t, err) - assert.Equal(t, "2.0.0", loaded.Images["other.img"].Version) - _, exists := 
loaded.Images["test.img"] - assert.False(t, exists) -} - -func TestImageManager_Install_Bad_NoSourceAvailable(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - // Create manager with empty sources - mgr := &ImageManager{ - config: DefaultConfig(), - manifest: &Manifest{Images: make(map[string]ImageInfo), path: filepath.Join(tmpDir, "manifest.json")}, - sources: nil, // no sources - } - - err := mgr.Install(context.Background(), nil) - assert.Error(t, err) - assert.Contains(t, err.Error(), "no image source available") -} - -func TestNewImageManager_Good_CreatesDir(t *testing.T) { - tmpDir := t.TempDir() - imagesDir := filepath.Join(tmpDir, "images") - t.Setenv("CORE_IMAGES_DIR", imagesDir) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - assert.NoError(t, err) - assert.NotNil(t, mgr) - - // Verify directory was created - info, err := os.Stat(imagesDir) - assert.NoError(t, err) - assert.True(t, info.IsDir()) -} - -// mockImageSource is a test helper for simulating image sources -type mockImageSource struct { - name string - available bool - latestVersion string - latestErr error - downloadErr error -} - -func (m *mockImageSource) Name() string { return m.name } -func (m *mockImageSource) Available() bool { return m.available } -func (m *mockImageSource) LatestVersion(ctx context.Context) (string, error) { - return m.latestVersion, m.latestErr -} -func (m *mockImageSource) Download(ctx context.Context, dest string, progress func(downloaded, total int64)) error { - if m.downloadErr != nil { - return m.downloadErr - } - // Create a fake image file - imagePath := filepath.Join(dest, ImageName()) - return os.WriteFile(imagePath, []byte("mock image content"), 0644) -} - -func TestImageManager_Install_Good_WithMockSource(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - mock := &mockImageSource{ - name: "mock", - available: true, - latestVersion: "v1.0.0", - } - - mgr := &ImageManager{ - config: 
DefaultConfig(), - manifest: &Manifest{Images: make(map[string]ImageInfo), path: filepath.Join(tmpDir, "manifest.json")}, - sources: []sources.ImageSource{mock}, - } - - err := mgr.Install(context.Background(), nil) - assert.NoError(t, err) - assert.True(t, mgr.IsInstalled()) - - // Verify manifest was updated - info, ok := mgr.manifest.Images[ImageName()] - assert.True(t, ok) - assert.Equal(t, "v1.0.0", info.Version) - assert.Equal(t, "mock", info.Source) -} - -func TestImageManager_Install_Bad_DownloadError(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - mock := &mockImageSource{ - name: "mock", - available: true, - latestVersion: "v1.0.0", - downloadErr: assert.AnError, - } - - mgr := &ImageManager{ - config: DefaultConfig(), - manifest: &Manifest{Images: make(map[string]ImageInfo), path: filepath.Join(tmpDir, "manifest.json")}, - sources: []sources.ImageSource{mock}, - } - - err := mgr.Install(context.Background(), nil) - assert.Error(t, err) -} - -func TestImageManager_Install_Bad_VersionError(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - mock := &mockImageSource{ - name: "mock", - available: true, - latestErr: assert.AnError, - } - - mgr := &ImageManager{ - config: DefaultConfig(), - manifest: &Manifest{Images: make(map[string]ImageInfo), path: filepath.Join(tmpDir, "manifest.json")}, - sources: []sources.ImageSource{mock}, - } - - err := mgr.Install(context.Background(), nil) - assert.Error(t, err) - assert.Contains(t, err.Error(), "failed to get latest version") -} - -func TestImageManager_Install_Good_SkipsUnavailableSource(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - unavailableMock := &mockImageSource{ - name: "unavailable", - available: false, - } - availableMock := &mockImageSource{ - name: "available", - available: true, - latestVersion: "v2.0.0", - } - - mgr := &ImageManager{ - config: DefaultConfig(), - manifest: &Manifest{Images: 
make(map[string]ImageInfo), path: filepath.Join(tmpDir, "manifest.json")}, - sources: []sources.ImageSource{unavailableMock, availableMock}, - } - - err := mgr.Install(context.Background(), nil) - assert.NoError(t, err) - - // Should have used the available source - info := mgr.manifest.Images[ImageName()] - assert.Equal(t, "available", info.Source) -} - -func TestImageManager_CheckUpdate_Good_WithMockSource(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - mock := &mockImageSource{ - name: "mock", - available: true, - latestVersion: "v2.0.0", - } - - mgr := &ImageManager{ - config: DefaultConfig(), - manifest: &Manifest{ - Images: map[string]ImageInfo{ - ImageName(): {Version: "v1.0.0", Source: "mock"}, - }, - path: filepath.Join(tmpDir, "manifest.json"), - }, - sources: []sources.ImageSource{mock}, - } - - current, latest, hasUpdate, err := mgr.CheckUpdate(context.Background()) - assert.NoError(t, err) - assert.Equal(t, "v1.0.0", current) - assert.Equal(t, "v2.0.0", latest) - assert.True(t, hasUpdate) -} - -func TestImageManager_CheckUpdate_Good_NoUpdate(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - mock := &mockImageSource{ - name: "mock", - available: true, - latestVersion: "v1.0.0", - } - - mgr := &ImageManager{ - config: DefaultConfig(), - manifest: &Manifest{ - Images: map[string]ImageInfo{ - ImageName(): {Version: "v1.0.0", Source: "mock"}, - }, - path: filepath.Join(tmpDir, "manifest.json"), - }, - sources: []sources.ImageSource{mock}, - } - - current, latest, hasUpdate, err := mgr.CheckUpdate(context.Background()) - assert.NoError(t, err) - assert.Equal(t, "v1.0.0", current) - assert.Equal(t, "v1.0.0", latest) - assert.False(t, hasUpdate) -} - -func TestImageManager_CheckUpdate_Bad_NoSource(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - unavailableMock := &mockImageSource{ - name: "mock", - available: false, - } - - mgr := &ImageManager{ - config: 
DefaultConfig(), - manifest: &Manifest{ - Images: map[string]ImageInfo{ - ImageName(): {Version: "v1.0.0", Source: "mock"}, - }, - path: filepath.Join(tmpDir, "manifest.json"), - }, - sources: []sources.ImageSource{unavailableMock}, - } - - _, _, _, err := mgr.CheckUpdate(context.Background()) - assert.Error(t, err) - assert.Contains(t, err.Error(), "no image source available") -} - -func TestImageManager_CheckUpdate_Bad_VersionError(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - mock := &mockImageSource{ - name: "mock", - available: true, - latestErr: assert.AnError, - } - - mgr := &ImageManager{ - config: DefaultConfig(), - manifest: &Manifest{ - Images: map[string]ImageInfo{ - ImageName(): {Version: "v1.0.0", Source: "mock"}, - }, - path: filepath.Join(tmpDir, "manifest.json"), - }, - sources: []sources.ImageSource{mock}, - } - - current, _, _, err := mgr.CheckUpdate(context.Background()) - assert.Error(t, err) - assert.Equal(t, "v1.0.0", current) // Current should still be returned -} - -func TestImageManager_Install_Bad_EmptySources(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - mgr := &ImageManager{ - config: DefaultConfig(), - manifest: &Manifest{Images: make(map[string]ImageInfo), path: filepath.Join(tmpDir, "manifest.json")}, - sources: []sources.ImageSource{}, // Empty slice, not nil - } - - err := mgr.Install(context.Background(), nil) - assert.Error(t, err) - assert.Contains(t, err.Error(), "no image source available") -} - -func TestImageManager_Install_Bad_AllUnavailable(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - mock1 := &mockImageSource{name: "mock1", available: false} - mock2 := &mockImageSource{name: "mock2", available: false} - - mgr := &ImageManager{ - config: DefaultConfig(), - manifest: &Manifest{Images: make(map[string]ImageInfo), path: filepath.Join(tmpDir, "manifest.json")}, - sources: []sources.ImageSource{mock1, mock2}, - } - - err 
:= mgr.Install(context.Background(), nil) - assert.Error(t, err) - assert.Contains(t, err.Error(), "no image source available") -} - -func TestImageManager_CheckUpdate_Good_FirstSourceUnavailable(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - unavailable := &mockImageSource{name: "unavailable", available: false} - available := &mockImageSource{name: "available", available: true, latestVersion: "v2.0.0"} - - mgr := &ImageManager{ - config: DefaultConfig(), - manifest: &Manifest{ - Images: map[string]ImageInfo{ - ImageName(): {Version: "v1.0.0", Source: "available"}, - }, - path: filepath.Join(tmpDir, "manifest.json"), - }, - sources: []sources.ImageSource{unavailable, available}, - } - - current, latest, hasUpdate, err := mgr.CheckUpdate(context.Background()) - assert.NoError(t, err) - assert.Equal(t, "v1.0.0", current) - assert.Equal(t, "v2.0.0", latest) - assert.True(t, hasUpdate) -} - -func TestManifest_Struct(t *testing.T) { - m := &Manifest{ - Images: map[string]ImageInfo{ - "test.img": {Version: "1.0.0"}, - }, - path: "/path/to/manifest.json", - } - assert.Equal(t, "/path/to/manifest.json", m.path) - assert.Len(t, m.Images, 1) - assert.Equal(t, "1.0.0", m.Images["test.img"].Version) -} \ No newline at end of file diff --git a/pkg/devops/serve.go b/pkg/devops/serve.go deleted file mode 100644 index 7d3cacd..0000000 --- a/pkg/devops/serve.go +++ /dev/null @@ -1,107 +0,0 @@ -package devops - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" -) - -// ServeOptions configures the dev server. -type ServeOptions struct { - Port int // Port to serve on (default 8000) - Path string // Subdirectory to serve (default: current dir) -} - -// Serve mounts the project and starts a dev server. 
-func (d *DevOps) Serve(ctx context.Context, projectDir string, opts ServeOptions) error { - running, err := d.IsRunning(ctx) - if err != nil { - return err - } - if !running { - return fmt.Errorf("dev environment not running (run 'core dev boot' first)") - } - - if opts.Port == 0 { - opts.Port = 8000 - } - - servePath := projectDir - if opts.Path != "" { - servePath = filepath.Join(projectDir, opts.Path) - } - - // Mount project directory via SSHFS - if err := d.mountProject(ctx, servePath); err != nil { - return fmt.Errorf("failed to mount project: %w", err) - } - - // Detect and run serve command - serveCmd := DetectServeCommand(servePath) - fmt.Printf("Starting server: %s\n", serveCmd) - fmt.Printf("Listening on http://localhost:%d\n", opts.Port) - - // Run serve command via SSH - return d.sshShell(ctx, []string{"cd", "/app", "&&", serveCmd}) -} - -// mountProject mounts a directory into the VM via SSHFS. -func (d *DevOps) mountProject(ctx context.Context, path string) error { - absPath, err := filepath.Abs(path) - if err != nil { - return err - } - - // Use reverse SSHFS mount - // The VM connects back to host to mount the directory - cmd := exec.CommandContext(ctx, "ssh", - "-o", "StrictHostKeyChecking=no", - "-o", "UserKnownHostsFile=/dev/null", - "-o", "LogLevel=ERROR", - "-R", "10000:localhost:22", // Reverse tunnel for SSHFS - "-p", "2222", - "root@localhost", - fmt.Sprintf("mkdir -p /app && sshfs -p 10000 %s@localhost:%s /app -o allow_other", os.Getenv("USER"), absPath), - ) - return cmd.Run() -} - -// DetectServeCommand auto-detects the serve command for a project. 
-func DetectServeCommand(projectDir string) string { - // Laravel/Octane - if hasFile(projectDir, "artisan") { - return "php artisan octane:start --host=0.0.0.0 --port=8000" - } - - // Node.js with dev script - if hasFile(projectDir, "package.json") { - if hasPackageScript(projectDir, "dev") { - return "npm run dev -- --host 0.0.0.0" - } - if hasPackageScript(projectDir, "start") { - return "npm start" - } - } - - // PHP with composer - if hasFile(projectDir, "composer.json") { - return "frankenphp php-server -l :8000" - } - - // Go - if hasFile(projectDir, "go.mod") { - if hasFile(projectDir, "main.go") { - return "go run ." - } - } - - // Python Django - if hasFile(projectDir, "manage.py") { - return "python manage.py runserver 0.0.0.0:8000" - } - - // Fallback: simple HTTP server - return "python3 -m http.server 8000" -} diff --git a/pkg/devops/serve_test.go b/pkg/devops/serve_test.go deleted file mode 100644 index 3ccb78f..0000000 --- a/pkg/devops/serve_test.go +++ /dev/null @@ -1,136 +0,0 @@ -package devops - -import ( - "os" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestDetectServeCommand_Good_Laravel(t *testing.T) { - tmpDir := t.TempDir() - err := os.WriteFile(filepath.Join(tmpDir, "artisan"), []byte("#!/usr/bin/env php"), 0644) - assert.NoError(t, err) - - cmd := DetectServeCommand(tmpDir) - assert.Equal(t, "php artisan octane:start --host=0.0.0.0 --port=8000", cmd) -} - -func TestDetectServeCommand_Good_NodeDev(t *testing.T) { - tmpDir := t.TempDir() - packageJSON := `{"scripts":{"dev":"vite","start":"node index.js"}}` - err := os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(packageJSON), 0644) - assert.NoError(t, err) - - cmd := DetectServeCommand(tmpDir) - assert.Equal(t, "npm run dev -- --host 0.0.0.0", cmd) -} - -func TestDetectServeCommand_Good_NodeStart(t *testing.T) { - tmpDir := t.TempDir() - packageJSON := `{"scripts":{"start":"node server.js"}}` - err := os.WriteFile(filepath.Join(tmpDir, 
"package.json"), []byte(packageJSON), 0644) - assert.NoError(t, err) - - cmd := DetectServeCommand(tmpDir) - assert.Equal(t, "npm start", cmd) -} - -func TestDetectServeCommand_Good_PHP(t *testing.T) { - tmpDir := t.TempDir() - err := os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{"require":{}}`), 0644) - assert.NoError(t, err) - - cmd := DetectServeCommand(tmpDir) - assert.Equal(t, "frankenphp php-server -l :8000", cmd) -} - -func TestDetectServeCommand_Good_GoMain(t *testing.T) { - tmpDir := t.TempDir() - err := os.WriteFile(filepath.Join(tmpDir, "go.mod"), []byte("module example"), 0644) - assert.NoError(t, err) - err = os.WriteFile(filepath.Join(tmpDir, "main.go"), []byte("package main"), 0644) - assert.NoError(t, err) - - cmd := DetectServeCommand(tmpDir) - assert.Equal(t, "go run .", cmd) -} - -func TestDetectServeCommand_Good_GoWithoutMain(t *testing.T) { - tmpDir := t.TempDir() - err := os.WriteFile(filepath.Join(tmpDir, "go.mod"), []byte("module example"), 0644) - assert.NoError(t, err) - - // No main.go, so falls through to fallback - cmd := DetectServeCommand(tmpDir) - assert.Equal(t, "python3 -m http.server 8000", cmd) -} - -func TestDetectServeCommand_Good_Django(t *testing.T) { - tmpDir := t.TempDir() - err := os.WriteFile(filepath.Join(tmpDir, "manage.py"), []byte("#!/usr/bin/env python"), 0644) - assert.NoError(t, err) - - cmd := DetectServeCommand(tmpDir) - assert.Equal(t, "python manage.py runserver 0.0.0.0:8000", cmd) -} - -func TestDetectServeCommand_Good_Fallback(t *testing.T) { - tmpDir := t.TempDir() - - cmd := DetectServeCommand(tmpDir) - assert.Equal(t, "python3 -m http.server 8000", cmd) -} - -func TestDetectServeCommand_Good_Priority(t *testing.T) { - // Laravel (artisan) should take priority over PHP (composer.json) - tmpDir := t.TempDir() - err := os.WriteFile(filepath.Join(tmpDir, "artisan"), []byte("#!/usr/bin/env php"), 0644) - assert.NoError(t, err) - err = os.WriteFile(filepath.Join(tmpDir, "composer.json"), 
[]byte(`{"require":{}}`), 0644) - assert.NoError(t, err) - - cmd := DetectServeCommand(tmpDir) - assert.Equal(t, "php artisan octane:start --host=0.0.0.0 --port=8000", cmd) -} - -func TestServeOptions_Default(t *testing.T) { - opts := ServeOptions{} - assert.Equal(t, 0, opts.Port) - assert.Equal(t, "", opts.Path) -} - -func TestServeOptions_Custom(t *testing.T) { - opts := ServeOptions{ - Port: 3000, - Path: "public", - } - assert.Equal(t, 3000, opts.Port) - assert.Equal(t, "public", opts.Path) -} - -func TestHasFile_Good(t *testing.T) { - tmpDir := t.TempDir() - testFile := filepath.Join(tmpDir, "test.txt") - err := os.WriteFile(testFile, []byte("content"), 0644) - assert.NoError(t, err) - - assert.True(t, hasFile(tmpDir, "test.txt")) -} - -func TestHasFile_Bad(t *testing.T) { - tmpDir := t.TempDir() - - assert.False(t, hasFile(tmpDir, "nonexistent.txt")) -} - -func TestHasFile_Bad_Directory(t *testing.T) { - tmpDir := t.TempDir() - subDir := filepath.Join(tmpDir, "subdir") - err := os.Mkdir(subDir, 0755) - assert.NoError(t, err) - - // hasFile returns true for directories too (it's just checking existence) - assert.True(t, hasFile(tmpDir, "subdir")) -} diff --git a/pkg/devops/shell.go b/pkg/devops/shell.go deleted file mode 100644 index fc343d8..0000000 --- a/pkg/devops/shell.go +++ /dev/null @@ -1,74 +0,0 @@ -package devops - -import ( - "context" - "fmt" - "os" - "os/exec" -) - -// ShellOptions configures the shell connection. -type ShellOptions struct { - Console bool // Use serial console instead of SSH - Command []string // Command to run (empty = interactive shell) -} - -// Shell connects to the dev environment. 
-func (d *DevOps) Shell(ctx context.Context, opts ShellOptions) error { - running, err := d.IsRunning(ctx) - if err != nil { - return err - } - if !running { - return fmt.Errorf("dev environment not running (run 'core dev boot' first)") - } - - if opts.Console { - return d.serialConsole(ctx) - } - - return d.sshShell(ctx, opts.Command) -} - -// sshShell connects via SSH. -func (d *DevOps) sshShell(ctx context.Context, command []string) error { - args := []string{ - "-o", "StrictHostKeyChecking=no", - "-o", "UserKnownHostsFile=/dev/null", - "-o", "LogLevel=ERROR", - "-A", // Agent forwarding - "-p", "2222", - "root@localhost", - } - - if len(command) > 0 { - args = append(args, command...) - } - - cmd := exec.CommandContext(ctx, "ssh", args...) - cmd.Stdin = os.Stdin - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - return cmd.Run() -} - -// serialConsole attaches to the QEMU serial console. -func (d *DevOps) serialConsole(ctx context.Context) error { - // Find the container to get its console socket - c, err := d.findContainer(ctx, "core-dev") - if err != nil { - return err - } - if c == nil { - return fmt.Errorf("console not available: container not found") - } - - // Use socat to connect to the console socket - socketPath := fmt.Sprintf("/tmp/core-%s-console.sock", c.ID) - cmd := exec.CommandContext(ctx, "socat", "-,raw,echo=0", "unix-connect:"+socketPath) - cmd.Stdin = os.Stdin - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - return cmd.Run() -} diff --git a/pkg/devops/shell_test.go b/pkg/devops/shell_test.go deleted file mode 100644 index e065a78..0000000 --- a/pkg/devops/shell_test.go +++ /dev/null @@ -1,47 +0,0 @@ -package devops - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestShellOptions_Default(t *testing.T) { - opts := ShellOptions{} - assert.False(t, opts.Console) - assert.Nil(t, opts.Command) -} - -func TestShellOptions_Console(t *testing.T) { - opts := ShellOptions{ - Console: true, - } - assert.True(t, 
opts.Console) - assert.Nil(t, opts.Command) -} - -func TestShellOptions_Command(t *testing.T) { - opts := ShellOptions{ - Command: []string{"ls", "-la"}, - } - assert.False(t, opts.Console) - assert.Equal(t, []string{"ls", "-la"}, opts.Command) -} - -func TestShellOptions_ConsoleWithCommand(t *testing.T) { - opts := ShellOptions{ - Console: true, - Command: []string{"echo", "hello"}, - } - assert.True(t, opts.Console) - assert.Equal(t, []string{"echo", "hello"}, opts.Command) -} - -func TestShellOptions_EmptyCommand(t *testing.T) { - opts := ShellOptions{ - Command: []string{}, - } - assert.False(t, opts.Console) - assert.Empty(t, opts.Command) - assert.Len(t, opts.Command, 0) -} diff --git a/pkg/devops/sources/cdn.go b/pkg/devops/sources/cdn.go deleted file mode 100644 index 851fe0e..0000000 --- a/pkg/devops/sources/cdn.go +++ /dev/null @@ -1,111 +0,0 @@ -package sources - -import ( - "context" - "fmt" - "io" - "net/http" - "os" - "path/filepath" -) - -// CDNSource downloads images from a CDN or S3 bucket. -type CDNSource struct { - config SourceConfig -} - -// Compile-time interface check. -var _ ImageSource = (*CDNSource)(nil) - -// NewCDNSource creates a new CDN source. -func NewCDNSource(cfg SourceConfig) *CDNSource { - return &CDNSource{config: cfg} -} - -// Name returns "cdn". -func (s *CDNSource) Name() string { - return "cdn" -} - -// Available checks if CDN URL is configured. -func (s *CDNSource) Available() bool { - return s.config.CDNURL != "" -} - -// LatestVersion fetches version from manifest or returns "latest". 
-func (s *CDNSource) LatestVersion(ctx context.Context) (string, error) { - // Try to fetch manifest.json for version info - url := fmt.Sprintf("%s/manifest.json", s.config.CDNURL) - req, err := http.NewRequestWithContext(ctx, "GET", url, nil) - if err != nil { - return "latest", nil - } - - resp, err := http.DefaultClient.Do(req) - if err != nil || resp.StatusCode != 200 { - return "latest", nil - } - defer resp.Body.Close() - - // For now, just return latest - could parse manifest for version - return "latest", nil -} - -// Download downloads the image from CDN. -func (s *CDNSource) Download(ctx context.Context, dest string, progress func(downloaded, total int64)) error { - url := fmt.Sprintf("%s/%s", s.config.CDNURL, s.config.ImageName) - - req, err := http.NewRequestWithContext(ctx, "GET", url, nil) - if err != nil { - return fmt.Errorf("cdn.Download: %w", err) - } - - resp, err := http.DefaultClient.Do(req) - if err != nil { - return fmt.Errorf("cdn.Download: %w", err) - } - defer resp.Body.Close() - - if resp.StatusCode != 200 { - return fmt.Errorf("cdn.Download: HTTP %d", resp.StatusCode) - } - - // Ensure dest directory exists - if err := os.MkdirAll(dest, 0755); err != nil { - return fmt.Errorf("cdn.Download: %w", err) - } - - // Create destination file - destPath := filepath.Join(dest, s.config.ImageName) - f, err := os.Create(destPath) - if err != nil { - return fmt.Errorf("cdn.Download: %w", err) - } - defer f.Close() - - // Copy with progress - total := resp.ContentLength - var downloaded int64 - - buf := make([]byte, 32*1024) - for { - n, err := resp.Body.Read(buf) - if n > 0 { - if _, werr := f.Write(buf[:n]); werr != nil { - return fmt.Errorf("cdn.Download: %w", werr) - } - downloaded += int64(n) - if progress != nil { - progress(downloaded, total) - } - } - if err == io.EOF { - break - } - if err != nil { - return fmt.Errorf("cdn.Download: %w", err) - } - } - - return nil -} diff --git a/pkg/devops/sources/cdn_test.go 
b/pkg/devops/sources/cdn_test.go deleted file mode 100644 index 52996ae..0000000 --- a/pkg/devops/sources/cdn_test.go +++ /dev/null @@ -1,305 +0,0 @@ -package sources - -import ( - "context" - "fmt" - "net/http" - "net/http/httptest" - "os" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestCDNSource_Good_Available(t *testing.T) { - src := NewCDNSource(SourceConfig{ - CDNURL: "https://images.example.com", - ImageName: "core-devops-darwin-arm64.qcow2", - }) - - assert.Equal(t, "cdn", src.Name()) - assert.True(t, src.Available()) -} - -func TestCDNSource_Bad_NoURL(t *testing.T) { - src := NewCDNSource(SourceConfig{ - ImageName: "core-devops-darwin-arm64.qcow2", - }) - - assert.False(t, src.Available()) -} - -func TestCDNSource_LatestVersion_Good(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if r.URL.Path == "/manifest.json" { - w.WriteHeader(http.StatusOK) - fmt.Fprint(w, `{"version": "1.2.3"}`) - } else { - w.WriteHeader(http.StatusNotFound) - } - })) - defer server.Close() - - src := NewCDNSource(SourceConfig{ - CDNURL: server.URL, - ImageName: "test.img", - }) - - version, err := src.LatestVersion(context.Background()) - assert.NoError(t, err) - assert.Equal(t, "latest", version) // Current impl always returns "latest" -} - -func TestCDNSource_Download_Good(t *testing.T) { - content := "fake image data" - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if r.URL.Path == "/test.img" { - w.WriteHeader(http.StatusOK) - fmt.Fprint(w, content) - } else { - w.WriteHeader(http.StatusNotFound) - } - })) - defer server.Close() - - dest := t.TempDir() - imageName := "test.img" - src := NewCDNSource(SourceConfig{ - CDNURL: server.URL, - ImageName: imageName, - }) - - var progressCalled bool - err := src.Download(context.Background(), dest, func(downloaded, total int64) { - progressCalled = true - }) - - assert.NoError(t, err) - 
assert.True(t, progressCalled) - - // Verify file content - data, err := os.ReadFile(filepath.Join(dest, imageName)) - assert.NoError(t, err) - assert.Equal(t, content, string(data)) -} - -func TestCDNSource_Download_Bad(t *testing.T) { - t.Run("HTTP error", func(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusInternalServerError) - })) - defer server.Close() - - dest := t.TempDir() - src := NewCDNSource(SourceConfig{ - CDNURL: server.URL, - ImageName: "test.img", - }) - - err := src.Download(context.Background(), dest, nil) - assert.Error(t, err) - assert.Contains(t, err.Error(), "HTTP 500") - }) - - t.Run("Invalid URL", func(t *testing.T) { - dest := t.TempDir() - src := NewCDNSource(SourceConfig{ - CDNURL: "http://invalid-url-that-should-fail", - ImageName: "test.img", - }) - - err := src.Download(context.Background(), dest, nil) - assert.Error(t, err) - }) -} - -func TestCDNSource_LatestVersion_Bad_NoManifest(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusNotFound) - })) - defer server.Close() - - src := NewCDNSource(SourceConfig{ - CDNURL: server.URL, - ImageName: "test.img", - }) - - version, err := src.LatestVersion(context.Background()) - assert.NoError(t, err) // Should not error, just return "latest" - assert.Equal(t, "latest", version) -} - -func TestCDNSource_LatestVersion_Bad_ServerError(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusInternalServerError) - })) - defer server.Close() - - src := NewCDNSource(SourceConfig{ - CDNURL: server.URL, - ImageName: "test.img", - }) - - version, err := src.LatestVersion(context.Background()) - assert.NoError(t, err) // Falls back to "latest" - assert.Equal(t, "latest", version) -} - -func TestCDNSource_Download_Good_NoProgress(t *testing.T) { - 
content := "test content" - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Header().Set("Content-Length", fmt.Sprintf("%d", len(content))) - w.WriteHeader(http.StatusOK) - fmt.Fprint(w, content) - })) - defer server.Close() - - dest := t.TempDir() - src := NewCDNSource(SourceConfig{ - CDNURL: server.URL, - ImageName: "test.img", - }) - - // nil progress callback should be handled gracefully - err := src.Download(context.Background(), dest, nil) - assert.NoError(t, err) - - data, err := os.ReadFile(filepath.Join(dest, "test.img")) - assert.NoError(t, err) - assert.Equal(t, content, string(data)) -} - -func TestCDNSource_Download_Good_LargeFile(t *testing.T) { - // Create content larger than buffer size (32KB) - content := make([]byte, 64*1024) // 64KB - for i := range content { - content[i] = byte(i % 256) - } - - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Header().Set("Content-Length", fmt.Sprintf("%d", len(content))) - w.WriteHeader(http.StatusOK) - w.Write(content) - })) - defer server.Close() - - dest := t.TempDir() - src := NewCDNSource(SourceConfig{ - CDNURL: server.URL, - ImageName: "large.img", - }) - - var progressCalls int - var lastDownloaded int64 - err := src.Download(context.Background(), dest, func(downloaded, total int64) { - progressCalls++ - lastDownloaded = downloaded - }) - - assert.NoError(t, err) - assert.Greater(t, progressCalls, 1) // Should be called multiple times for large file - assert.Equal(t, int64(len(content)), lastDownloaded) -} - -func TestCDNSource_Download_Bad_HTTPErrorCodes(t *testing.T) { - testCases := []struct { - name string - statusCode int - }{ - {"Bad Request", http.StatusBadRequest}, - {"Unauthorized", http.StatusUnauthorized}, - {"Forbidden", http.StatusForbidden}, - {"Not Found", http.StatusNotFound}, - {"Service Unavailable", http.StatusServiceUnavailable}, - } - - for _, tc := range testCases { - t.Run(tc.name, 
func(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(tc.statusCode) - })) - defer server.Close() - - dest := t.TempDir() - src := NewCDNSource(SourceConfig{ - CDNURL: server.URL, - ImageName: "test.img", - }) - - err := src.Download(context.Background(), dest, nil) - assert.Error(t, err) - assert.Contains(t, err.Error(), fmt.Sprintf("HTTP %d", tc.statusCode)) - }) - } -} - -func TestCDNSource_InterfaceCompliance(t *testing.T) { - // Verify CDNSource implements ImageSource - var _ ImageSource = (*CDNSource)(nil) -} - -func TestCDNSource_Config(t *testing.T) { - cfg := SourceConfig{ - CDNURL: "https://cdn.example.com", - ImageName: "my-image.qcow2", - } - src := NewCDNSource(cfg) - - assert.Equal(t, "https://cdn.example.com", src.config.CDNURL) - assert.Equal(t, "my-image.qcow2", src.config.ImageName) -} - -func TestNewCDNSource_Good(t *testing.T) { - cfg := SourceConfig{ - GitHubRepo: "host-uk/core-images", - RegistryImage: "ghcr.io/host-uk/core-devops", - CDNURL: "https://cdn.example.com", - ImageName: "core-devops-darwin-arm64.qcow2", - } - - src := NewCDNSource(cfg) - assert.NotNil(t, src) - assert.Equal(t, "cdn", src.Name()) - assert.Equal(t, cfg.CDNURL, src.config.CDNURL) -} - -func TestCDNSource_Download_Good_CreatesDestDir(t *testing.T) { - content := "test content" - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusOK) - fmt.Fprint(w, content) - })) - defer server.Close() - - tmpDir := t.TempDir() - dest := filepath.Join(tmpDir, "nested", "dir") - // dest doesn't exist yet - - src := NewCDNSource(SourceConfig{ - CDNURL: server.URL, - ImageName: "test.img", - }) - - err := src.Download(context.Background(), dest, nil) - assert.NoError(t, err) - - // Verify nested dir was created - info, err := os.Stat(dest) - assert.NoError(t, err) - assert.True(t, info.IsDir()) -} - -func TestSourceConfig_Struct(t *testing.T) { 
- cfg := SourceConfig{ - GitHubRepo: "owner/repo", - RegistryImage: "ghcr.io/owner/image", - CDNURL: "https://cdn.example.com", - ImageName: "image.qcow2", - } - - assert.Equal(t, "owner/repo", cfg.GitHubRepo) - assert.Equal(t, "ghcr.io/owner/image", cfg.RegistryImage) - assert.Equal(t, "https://cdn.example.com", cfg.CDNURL) - assert.Equal(t, "image.qcow2", cfg.ImageName) -} \ No newline at end of file diff --git a/pkg/devops/sources/github.go b/pkg/devops/sources/github.go deleted file mode 100644 index 98a86b6..0000000 --- a/pkg/devops/sources/github.go +++ /dev/null @@ -1,70 +0,0 @@ -package sources - -import ( - "context" - "fmt" - "os" - "os/exec" - "strings" -) - -// GitHubSource downloads images from GitHub Releases. -type GitHubSource struct { - config SourceConfig -} - -// Compile-time interface check. -var _ ImageSource = (*GitHubSource)(nil) - -// NewGitHubSource creates a new GitHub source. -func NewGitHubSource(cfg SourceConfig) *GitHubSource { - return &GitHubSource{config: cfg} -} - -// Name returns "github". -func (s *GitHubSource) Name() string { - return "github" -} - -// Available checks if gh CLI is installed and authenticated. -func (s *GitHubSource) Available() bool { - _, err := exec.LookPath("gh") - if err != nil { - return false - } - // Check if authenticated - cmd := exec.Command("gh", "auth", "status") - return cmd.Run() == nil -} - -// LatestVersion returns the latest release tag. -func (s *GitHubSource) LatestVersion(ctx context.Context) (string, error) { - cmd := exec.CommandContext(ctx, "gh", "release", "view", - "-R", s.config.GitHubRepo, - "--json", "tagName", - "-q", ".tagName", - ) - out, err := cmd.Output() - if err != nil { - return "", fmt.Errorf("github.LatestVersion: %w", err) - } - return strings.TrimSpace(string(out)), nil -} - -// Download downloads the image from the latest release. 
-func (s *GitHubSource) Download(ctx context.Context, dest string, progress func(downloaded, total int64)) error { - // Get release assets to find our image - cmd := exec.CommandContext(ctx, "gh", "release", "download", - "-R", s.config.GitHubRepo, - "-p", s.config.ImageName, - "-D", dest, - "--clobber", - ) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - if err := cmd.Run(); err != nil { - return fmt.Errorf("github.Download: %w", err) - } - return nil -} diff --git a/pkg/devops/sources/github_test.go b/pkg/devops/sources/github_test.go deleted file mode 100644 index 7281129..0000000 --- a/pkg/devops/sources/github_test.go +++ /dev/null @@ -1,68 +0,0 @@ -package sources - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestGitHubSource_Good_Available(t *testing.T) { - src := NewGitHubSource(SourceConfig{ - GitHubRepo: "host-uk/core-images", - ImageName: "core-devops-darwin-arm64.qcow2", - }) - - if src.Name() != "github" { - t.Errorf("expected name 'github', got %q", src.Name()) - } - - // Available depends on gh CLI being installed - _ = src.Available() -} - -func TestGitHubSource_Name(t *testing.T) { - src := NewGitHubSource(SourceConfig{}) - assert.Equal(t, "github", src.Name()) -} - -func TestGitHubSource_Config(t *testing.T) { - cfg := SourceConfig{ - GitHubRepo: "owner/repo", - ImageName: "test-image.qcow2", - } - src := NewGitHubSource(cfg) - - // Verify the config is stored - assert.Equal(t, "owner/repo", src.config.GitHubRepo) - assert.Equal(t, "test-image.qcow2", src.config.ImageName) -} - -func TestGitHubSource_Good_Multiple(t *testing.T) { - // Test creating multiple sources with different configs - src1 := NewGitHubSource(SourceConfig{GitHubRepo: "org1/repo1", ImageName: "img1.qcow2"}) - src2 := NewGitHubSource(SourceConfig{GitHubRepo: "org2/repo2", ImageName: "img2.qcow2"}) - - assert.Equal(t, "org1/repo1", src1.config.GitHubRepo) - assert.Equal(t, "org2/repo2", src2.config.GitHubRepo) - assert.Equal(t, "github", 
src1.Name()) - assert.Equal(t, "github", src2.Name()) -} - -func TestNewGitHubSource_Good(t *testing.T) { - cfg := SourceConfig{ - GitHubRepo: "host-uk/core-images", - RegistryImage: "ghcr.io/host-uk/core-devops", - CDNURL: "https://cdn.example.com", - ImageName: "core-devops-darwin-arm64.qcow2", - } - - src := NewGitHubSource(cfg) - assert.NotNil(t, src) - assert.Equal(t, "github", src.Name()) - assert.Equal(t, cfg.GitHubRepo, src.config.GitHubRepo) -} - -func TestGitHubSource_InterfaceCompliance(t *testing.T) { - // Verify GitHubSource implements ImageSource - var _ ImageSource = (*GitHubSource)(nil) -} diff --git a/pkg/devops/sources/source.go b/pkg/devops/sources/source.go deleted file mode 100644 index 94e4ff6..0000000 --- a/pkg/devops/sources/source.go +++ /dev/null @@ -1,31 +0,0 @@ -// Package sources provides image download sources for core-devops. -package sources - -import ( - "context" -) - -// ImageSource defines the interface for downloading dev images. -type ImageSource interface { - // Name returns the source identifier. - Name() string - // Available checks if this source can be used. - Available() bool - // LatestVersion returns the latest available version. - LatestVersion(ctx context.Context) (string, error) - // Download downloads the image to the destination path. - // Reports progress via the callback if provided. - Download(ctx context.Context, dest string, progress func(downloaded, total int64)) error -} - -// SourceConfig holds configuration for a source. 
-type SourceConfig struct { - // GitHub configuration - GitHubRepo string - // Registry configuration - RegistryImage string - // CDN configuration - CDNURL string - // Image name (e.g., core-devops-darwin-arm64.qcow2) - ImageName string -} diff --git a/pkg/devops/sources/source_test.go b/pkg/devops/sources/source_test.go deleted file mode 100644 index a63f09b..0000000 --- a/pkg/devops/sources/source_test.go +++ /dev/null @@ -1,35 +0,0 @@ -package sources - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestSourceConfig_Empty(t *testing.T) { - cfg := SourceConfig{} - assert.Empty(t, cfg.GitHubRepo) - assert.Empty(t, cfg.RegistryImage) - assert.Empty(t, cfg.CDNURL) - assert.Empty(t, cfg.ImageName) -} - -func TestSourceConfig_Complete(t *testing.T) { - cfg := SourceConfig{ - GitHubRepo: "owner/repo", - RegistryImage: "ghcr.io/owner/image:v1", - CDNURL: "https://cdn.example.com/images", - ImageName: "my-image-darwin-arm64.qcow2", - } - - assert.Equal(t, "owner/repo", cfg.GitHubRepo) - assert.Equal(t, "ghcr.io/owner/image:v1", cfg.RegistryImage) - assert.Equal(t, "https://cdn.example.com/images", cfg.CDNURL) - assert.Equal(t, "my-image-darwin-arm64.qcow2", cfg.ImageName) -} - -func TestImageSource_Interface(t *testing.T) { - // Ensure both sources implement the interface - var _ ImageSource = (*GitHubSource)(nil) - var _ ImageSource = (*CDNSource)(nil) -} diff --git a/pkg/devops/test.go b/pkg/devops/test.go deleted file mode 100644 index d5116cd..0000000 --- a/pkg/devops/test.go +++ /dev/null @@ -1,167 +0,0 @@ -package devops - -import ( - "context" - "encoding/json" - "fmt" - "os" - "path/filepath" - "strings" - - "gopkg.in/yaml.v3" -) - -// TestConfig holds test configuration from .core/test.yaml. -type TestConfig struct { - Version int `yaml:"version"` - Command string `yaml:"command,omitempty"` - Commands []TestCommand `yaml:"commands,omitempty"` - Env map[string]string `yaml:"env,omitempty"` -} - -// TestCommand is a named test command. 
-type TestCommand struct { - Name string `yaml:"name"` - Run string `yaml:"run"` -} - -// TestOptions configures test execution. -type TestOptions struct { - Name string // Run specific named command from .core/test.yaml - Command []string // Override command (from -- args) -} - -// Test runs tests in the dev environment. -func (d *DevOps) Test(ctx context.Context, projectDir string, opts TestOptions) error { - running, err := d.IsRunning(ctx) - if err != nil { - return err - } - if !running { - return fmt.Errorf("dev environment not running (run 'core dev boot' first)") - } - - var cmd string - - // Priority: explicit command > named command > auto-detect - if len(opts.Command) > 0 { - cmd = strings.Join(opts.Command, " ") - } else if opts.Name != "" { - cfg, err := LoadTestConfig(projectDir) - if err != nil { - return err - } - for _, c := range cfg.Commands { - if c.Name == opts.Name { - cmd = c.Run - break - } - } - if cmd == "" { - return fmt.Errorf("test command %q not found in .core/test.yaml", opts.Name) - } - } else { - cmd = DetectTestCommand(projectDir) - if cmd == "" { - return fmt.Errorf("could not detect test command (create .core/test.yaml)") - } - } - - // Run via SSH - construct command as single string for shell execution - return d.sshShell(ctx, []string{"cd", "/app", "&&", cmd}) -} - -// DetectTestCommand auto-detects the test command for a project. -func DetectTestCommand(projectDir string) string { - // 1. Check .core/test.yaml - cfg, err := LoadTestConfig(projectDir) - if err == nil && cfg.Command != "" { - return cfg.Command - } - - // 2. Check composer.json for test script - if hasFile(projectDir, "composer.json") { - if hasComposerScript(projectDir, "test") { - return "composer test" - } - } - - // 3. Check package.json for test script - if hasFile(projectDir, "package.json") { - if hasPackageScript(projectDir, "test") { - return "npm test" - } - } - - // 4. Check go.mod - if hasFile(projectDir, "go.mod") { - return "go test ./..." 
- } - - // 5. Check pytest - if hasFile(projectDir, "pytest.ini") || hasFile(projectDir, "pyproject.toml") { - return "pytest" - } - - // 6. Check Taskfile - if hasFile(projectDir, "Taskfile.yaml") || hasFile(projectDir, "Taskfile.yml") { - return "task test" - } - - return "" -} - -// LoadTestConfig loads .core/test.yaml. -func LoadTestConfig(projectDir string) (*TestConfig, error) { - path := filepath.Join(projectDir, ".core", "test.yaml") - data, err := os.ReadFile(path) - if err != nil { - return nil, err - } - - var cfg TestConfig - if err := yaml.Unmarshal(data, &cfg); err != nil { - return nil, err - } - - return &cfg, nil -} - -func hasFile(dir, name string) bool { - _, err := os.Stat(filepath.Join(dir, name)) - return err == nil -} - -func hasPackageScript(projectDir, script string) bool { - data, err := os.ReadFile(filepath.Join(projectDir, "package.json")) - if err != nil { - return false - } - - var pkg struct { - Scripts map[string]string `json:"scripts"` - } - if err := json.Unmarshal(data, &pkg); err != nil { - return false - } - - _, ok := pkg.Scripts[script] - return ok -} - -func hasComposerScript(projectDir, script string) bool { - data, err := os.ReadFile(filepath.Join(projectDir, "composer.json")) - if err != nil { - return false - } - - var pkg struct { - Scripts map[string]interface{} `json:"scripts"` - } - if err := json.Unmarshal(data, &pkg); err != nil { - return false - } - - _, ok := pkg.Scripts[script] - return ok -} diff --git a/pkg/devops/test_test.go b/pkg/devops/test_test.go deleted file mode 100644 index 4df32bc..0000000 --- a/pkg/devops/test_test.go +++ /dev/null @@ -1,352 +0,0 @@ -package devops - -import ( - "os" - "path/filepath" - "testing" -) - -func TestDetectTestCommand_Good_ComposerJSON(t *testing.T) { - tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{"scripts":{"test":"pest"}}`), 0644) - - cmd := DetectTestCommand(tmpDir) - if cmd != "composer test" { - t.Errorf("expected 'composer 
test', got %q", cmd) - } -} - -func TestDetectTestCommand_Good_PackageJSON(t *testing.T) { - tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`{"scripts":{"test":"vitest"}}`), 0644) - - cmd := DetectTestCommand(tmpDir) - if cmd != "npm test" { - t.Errorf("expected 'npm test', got %q", cmd) - } -} - -func TestDetectTestCommand_Good_GoMod(t *testing.T) { - tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "go.mod"), []byte("module example"), 0644) - - cmd := DetectTestCommand(tmpDir) - if cmd != "go test ./..." { - t.Errorf("expected 'go test ./...', got %q", cmd) - } -} - -func TestDetectTestCommand_Good_CoreTestYaml(t *testing.T) { - tmpDir := t.TempDir() - coreDir := filepath.Join(tmpDir, ".core") - os.MkdirAll(coreDir, 0755) - os.WriteFile(filepath.Join(coreDir, "test.yaml"), []byte("command: custom-test"), 0644) - - cmd := DetectTestCommand(tmpDir) - if cmd != "custom-test" { - t.Errorf("expected 'custom-test', got %q", cmd) - } -} - -func TestDetectTestCommand_Good_Pytest(t *testing.T) { - tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "pytest.ini"), []byte("[pytest]"), 0644) - - cmd := DetectTestCommand(tmpDir) - if cmd != "pytest" { - t.Errorf("expected 'pytest', got %q", cmd) - } -} - -func TestDetectTestCommand_Good_Taskfile(t *testing.T) { - tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "Taskfile.yaml"), []byte("version: '3'"), 0644) - - cmd := DetectTestCommand(tmpDir) - if cmd != "task test" { - t.Errorf("expected 'task test', got %q", cmd) - } -} - -func TestDetectTestCommand_Bad_NoFiles(t *testing.T) { - tmpDir := t.TempDir() - - cmd := DetectTestCommand(tmpDir) - if cmd != "" { - t.Errorf("expected empty string, got %q", cmd) - } -} - -func TestDetectTestCommand_Good_Priority(t *testing.T) { - // .core/test.yaml should take priority over other detection methods - tmpDir := t.TempDir() - coreDir := filepath.Join(tmpDir, ".core") - os.MkdirAll(coreDir, 0755) - 
os.WriteFile(filepath.Join(coreDir, "test.yaml"), []byte("command: my-custom-test"), 0644) - os.WriteFile(filepath.Join(tmpDir, "go.mod"), []byte("module example"), 0644) - - cmd := DetectTestCommand(tmpDir) - if cmd != "my-custom-test" { - t.Errorf("expected 'my-custom-test' (from .core/test.yaml), got %q", cmd) - } -} - -func TestLoadTestConfig_Good(t *testing.T) { - tmpDir := t.TempDir() - coreDir := filepath.Join(tmpDir, ".core") - os.MkdirAll(coreDir, 0755) - - configYAML := `version: 1 -command: default-test -commands: - - name: unit - run: go test ./... - - name: integration - run: go test -tags=integration ./... -env: - CI: "true" -` - os.WriteFile(filepath.Join(coreDir, "test.yaml"), []byte(configYAML), 0644) - - cfg, err := LoadTestConfig(tmpDir) - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - - if cfg.Version != 1 { - t.Errorf("expected version 1, got %d", cfg.Version) - } - if cfg.Command != "default-test" { - t.Errorf("expected command 'default-test', got %q", cfg.Command) - } - if len(cfg.Commands) != 2 { - t.Errorf("expected 2 commands, got %d", len(cfg.Commands)) - } - if cfg.Commands[0].Name != "unit" { - t.Errorf("expected first command name 'unit', got %q", cfg.Commands[0].Name) - } - if cfg.Env["CI"] != "true" { - t.Errorf("expected env CI='true', got %q", cfg.Env["CI"]) - } -} - -func TestLoadTestConfig_Bad_NotFound(t *testing.T) { - tmpDir := t.TempDir() - - _, err := LoadTestConfig(tmpDir) - if err == nil { - t.Error("expected error for missing config, got nil") - } -} - -func TestHasPackageScript_Good(t *testing.T) { - tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`{"scripts":{"test":"jest","build":"webpack"}}`), 0644) - - if !hasPackageScript(tmpDir, "test") { - t.Error("expected to find 'test' script") - } - if !hasPackageScript(tmpDir, "build") { - t.Error("expected to find 'build' script") - } -} - -func TestHasPackageScript_Bad_MissingScript(t *testing.T) { - tmpDir := t.TempDir() - 
os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`{"scripts":{"build":"webpack"}}`), 0644) - - if hasPackageScript(tmpDir, "test") { - t.Error("expected not to find 'test' script") - } -} - -func TestHasComposerScript_Good(t *testing.T) { - tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{"scripts":{"test":"pest","post-install-cmd":"@php artisan migrate"}}`), 0644) - - if !hasComposerScript(tmpDir, "test") { - t.Error("expected to find 'test' script") - } -} - -func TestHasComposerScript_Bad_MissingScript(t *testing.T) { - tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{"scripts":{"build":"@php build.php"}}`), 0644) - - if hasComposerScript(tmpDir, "test") { - t.Error("expected not to find 'test' script") - } -} - -func TestTestConfig_Struct(t *testing.T) { - cfg := &TestConfig{ - Version: 2, - Command: "my-test", - Commands: []TestCommand{{Name: "unit", Run: "go test ./..."}}, - Env: map[string]string{"CI": "true"}, - } - if cfg.Version != 2 { - t.Errorf("expected version 2, got %d", cfg.Version) - } - if cfg.Command != "my-test" { - t.Errorf("expected command 'my-test', got %q", cfg.Command) - } - if len(cfg.Commands) != 1 { - t.Errorf("expected 1 command, got %d", len(cfg.Commands)) - } - if cfg.Env["CI"] != "true" { - t.Errorf("expected CI=true, got %q", cfg.Env["CI"]) - } -} - -func TestTestCommand_Struct(t *testing.T) { - cmd := TestCommand{ - Name: "integration", - Run: "go test -tags=integration ./...", - } - if cmd.Name != "integration" { - t.Errorf("expected name 'integration', got %q", cmd.Name) - } - if cmd.Run != "go test -tags=integration ./..." 
{ - t.Errorf("expected run command, got %q", cmd.Run) - } -} - -func TestTestOptions_Struct(t *testing.T) { - opts := TestOptions{ - Name: "unit", - Command: []string{"go", "test", "-v"}, - } - if opts.Name != "unit" { - t.Errorf("expected name 'unit', got %q", opts.Name) - } - if len(opts.Command) != 3 { - t.Errorf("expected 3 command parts, got %d", len(opts.Command)) - } -} - -func TestDetectTestCommand_Good_TaskfileYml(t *testing.T) { - tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "Taskfile.yml"), []byte("version: '3'"), 0644) - - cmd := DetectTestCommand(tmpDir) - if cmd != "task test" { - t.Errorf("expected 'task test', got %q", cmd) - } -} - -func TestDetectTestCommand_Good_Pyproject(t *testing.T) { - tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "pyproject.toml"), []byte("[tool.pytest]"), 0644) - - cmd := DetectTestCommand(tmpDir) - if cmd != "pytest" { - t.Errorf("expected 'pytest', got %q", cmd) - } -} - -func TestHasPackageScript_Bad_NoFile(t *testing.T) { - tmpDir := t.TempDir() - - if hasPackageScript(tmpDir, "test") { - t.Error("expected false for missing package.json") - } -} - -func TestHasPackageScript_Bad_InvalidJSON(t *testing.T) { - tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`invalid json`), 0644) - - if hasPackageScript(tmpDir, "test") { - t.Error("expected false for invalid JSON") - } -} - -func TestHasPackageScript_Bad_NoScripts(t *testing.T) { - tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`{"name":"test"}`), 0644) - - if hasPackageScript(tmpDir, "test") { - t.Error("expected false for missing scripts section") - } -} - -func TestHasComposerScript_Bad_NoFile(t *testing.T) { - tmpDir := t.TempDir() - - if hasComposerScript(tmpDir, "test") { - t.Error("expected false for missing composer.json") - } -} - -func TestHasComposerScript_Bad_InvalidJSON(t *testing.T) { - tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "composer.json"), 
[]byte(`invalid json`), 0644) - - if hasComposerScript(tmpDir, "test") { - t.Error("expected false for invalid JSON") - } -} - -func TestHasComposerScript_Bad_NoScripts(t *testing.T) { - tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{"name":"test/pkg"}`), 0644) - - if hasComposerScript(tmpDir, "test") { - t.Error("expected false for missing scripts section") - } -} - -func TestLoadTestConfig_Bad_InvalidYAML(t *testing.T) { - tmpDir := t.TempDir() - coreDir := filepath.Join(tmpDir, ".core") - os.MkdirAll(coreDir, 0755) - os.WriteFile(filepath.Join(coreDir, "test.yaml"), []byte("invalid: yaml: :"), 0644) - - _, err := LoadTestConfig(tmpDir) - if err == nil { - t.Error("expected error for invalid YAML") - } -} - -func TestLoadTestConfig_Good_MinimalConfig(t *testing.T) { - tmpDir := t.TempDir() - coreDir := filepath.Join(tmpDir, ".core") - os.MkdirAll(coreDir, 0755) - os.WriteFile(filepath.Join(coreDir, "test.yaml"), []byte("version: 1"), 0644) - - cfg, err := LoadTestConfig(tmpDir) - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - if cfg.Version != 1 { - t.Errorf("expected version 1, got %d", cfg.Version) - } - if cfg.Command != "" { - t.Errorf("expected empty command, got %q", cfg.Command) - } -} - -func TestDetectTestCommand_Good_ComposerWithoutScript(t *testing.T) { - tmpDir := t.TempDir() - // composer.json without test script should not return composer test - os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{"name":"test/pkg"}`), 0644) - - cmd := DetectTestCommand(tmpDir) - // Falls through to empty (no match) - if cmd != "" { - t.Errorf("expected empty string, got %q", cmd) - } -} - -func TestDetectTestCommand_Good_PackageJSONWithoutScript(t *testing.T) { - tmpDir := t.TempDir() - // package.json without test or dev script - os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`{"name":"test"}`), 0644) - - cmd := DetectTestCommand(tmpDir) - // Falls through to empty - if cmd != "" { - 
t.Errorf("expected empty string, got %q", cmd) - } -} diff --git a/pkg/docs/cmd_commands.go b/pkg/docs/cmd_commands.go deleted file mode 100644 index e17dabb..0000000 --- a/pkg/docs/cmd_commands.go +++ /dev/null @@ -1,20 +0,0 @@ -// Package docs provides documentation management commands for multi-repo workspaces. -// -// Commands: -// - list: Scan repos for README.md, CLAUDE.md, CHANGELOG.md, docs/ -// - sync: Copy docs/ files from all repos to core-php/docs/packages/ -// -// Works with repos.yaml to discover repositories and sync documentation -// to a central location for unified documentation builds. -package docs - -import "github.com/host-uk/core/pkg/cli" - -func init() { - cli.RegisterCommands(AddDocsCommands) -} - -// AddDocsCommands registers the 'docs' command and all subcommands. -func AddDocsCommands(root *cli.Command) { - root.AddCommand(docsCmd) -} diff --git a/pkg/docs/cmd_docs.go b/pkg/docs/cmd_docs.go deleted file mode 100644 index c75687c..0000000 --- a/pkg/docs/cmd_docs.go +++ /dev/null @@ -1,31 +0,0 @@ -// Package docs provides documentation management commands. 
-package docs - -import ( - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -// Style and utility aliases from shared -var ( - repoNameStyle = cli.RepoStyle - successStyle = cli.SuccessStyle - errorStyle = cli.ErrorStyle - dimStyle = cli.DimStyle - headerStyle = cli.HeaderStyle - confirm = cli.Confirm - docsFoundStyle = cli.SuccessStyle - docsMissingStyle = cli.DimStyle - docsFileStyle = cli.InfoStyle -) - -var docsCmd = &cli.Command{ - Use: "docs", - Short: i18n.T("cmd.docs.short"), - Long: i18n.T("cmd.docs.long"), -} - -func init() { - docsCmd.AddCommand(docsSyncCmd) - docsCmd.AddCommand(docsListCmd) -} diff --git a/pkg/docs/cmd_list.go b/pkg/docs/cmd_list.go deleted file mode 100644 index 8df4066..0000000 --- a/pkg/docs/cmd_list.go +++ /dev/null @@ -1,83 +0,0 @@ -package docs - -import ( - "strings" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -// Flag variable for list command -var docsListRegistryPath string - -var docsListCmd = &cli.Command{ - Use: "list", - Short: i18n.T("cmd.docs.list.short"), - Long: i18n.T("cmd.docs.list.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runDocsList(docsListRegistryPath) - }, -} - -func init() { - docsListCmd.Flags().StringVar(&docsListRegistryPath, "registry", "", i18n.T("common.flag.registry")) -} - -func runDocsList(registryPath string) error { - reg, _, err := loadRegistry(registryPath) - if err != nil { - return err - } - - cli.Print("\n%-20s %-8s %-8s %-10s %s\n", - headerStyle.Render(i18n.Label("repo")), - headerStyle.Render(i18n.T("cmd.docs.list.header.readme")), - headerStyle.Render(i18n.T("cmd.docs.list.header.claude")), - headerStyle.Render(i18n.T("cmd.docs.list.header.changelog")), - headerStyle.Render(i18n.T("cmd.docs.list.header.docs")), - ) - cli.Text(strings.Repeat("─", 70)) - - var withDocs, withoutDocs int - for _, repo := range reg.List() { - info := scanRepoDocs(repo) - - readme := checkMark(info.Readme != "") - claude 
:= checkMark(info.ClaudeMd != "") - changelog := checkMark(info.Changelog != "") - - docsDir := checkMark(false) - if len(info.DocsFiles) > 0 { - docsDir = docsFoundStyle.Render(i18n.T("common.count.files", map[string]interface{}{"Count": len(info.DocsFiles)})) - } - - cli.Print("%-20s %-8s %-8s %-10s %s\n", - repoNameStyle.Render(info.Name), - readme, - claude, - changelog, - docsDir, - ) - - if info.HasDocs { - withDocs++ - } else { - withoutDocs++ - } - } - - cli.Blank() - cli.Print("%s %s\n", - cli.KeyStyle.Render(i18n.Label("coverage")), - i18n.T("cmd.docs.list.coverage_summary", map[string]interface{}{"WithDocs": withDocs, "WithoutDocs": withoutDocs}), - ) - - return nil -} - -func checkMark(ok bool) string { - if ok { - return cli.Glyph(":check:") - } - return cli.Glyph(":cross:") -} diff --git a/pkg/docs/cmd_scan.go b/pkg/docs/cmd_scan.go deleted file mode 100644 index 9920b5f..0000000 --- a/pkg/docs/cmd_scan.go +++ /dev/null @@ -1,147 +0,0 @@ -package docs - -import ( - "io/fs" - "os" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/repos" - "github.com/host-uk/core/pkg/workspace" -) - -// RepoDocInfo holds documentation info for a repo -type RepoDocInfo struct { - Name string - Path string - HasDocs bool - Readme string - ClaudeMd string - Changelog string - DocsFiles []string // All files in docs/ directory (recursive) -} - -func loadRegistry(registryPath string) (*repos.Registry, string, error) { - var reg *repos.Registry - var err error - var registryDir string - - if registryPath != "" { - reg, err = repos.LoadRegistry(registryPath) - if err != nil { - return nil, "", cli.Wrap(err, i18n.T("i18n.fail.load", "registry")) - } - registryDir = filepath.Dir(registryPath) - } else { - registryPath, err = repos.FindRegistry() - if err == nil { - reg, err = repos.LoadRegistry(registryPath) - if err != nil { - return nil, "", cli.Wrap(err, i18n.T("i18n.fail.load", 
"registry")) - } - registryDir = filepath.Dir(registryPath) - } else { - cwd, _ := os.Getwd() - reg, err = repos.ScanDirectory(cwd) - if err != nil { - return nil, "", cli.Wrap(err, i18n.T("i18n.fail.scan", "directory")) - } - registryDir = cwd - } - } - - // Load workspace config to respect packages_dir - wsConfig, err := workspace.LoadConfig(registryDir) - if err != nil { - return nil, "", cli.Wrap(err, i18n.T("i18n.fail.load", "workspace config")) - } - - basePath := registryDir - - if wsConfig != nil && wsConfig.PackagesDir != "" && wsConfig.PackagesDir != "./packages" { - pkgDir := wsConfig.PackagesDir - - // Expand ~ - if strings.HasPrefix(pkgDir, "~/") { - home, _ := os.UserHomeDir() - pkgDir = filepath.Join(home, pkgDir[2:]) - } - - if !filepath.IsAbs(pkgDir) { - pkgDir = filepath.Join(registryDir, pkgDir) - } - basePath = pkgDir - - // Update repo paths if they were relative to registry - // This ensures consistency when packages_dir overrides the default - reg.BasePath = basePath - for _, repo := range reg.Repos { - repo.Path = filepath.Join(basePath, repo.Name) - } - } - - return reg, basePath, nil -} - -func scanRepoDocs(repo *repos.Repo) RepoDocInfo { - info := RepoDocInfo{ - Name: repo.Name, - Path: repo.Path, - } - - // Check for README.md - readme := filepath.Join(repo.Path, "README.md") - if _, err := os.Stat(readme); err == nil { - info.Readme = readme - info.HasDocs = true - } - - // Check for CLAUDE.md - claudeMd := filepath.Join(repo.Path, "CLAUDE.md") - if _, err := os.Stat(claudeMd); err == nil { - info.ClaudeMd = claudeMd - info.HasDocs = true - } - - // Check for CHANGELOG.md - changelog := filepath.Join(repo.Path, "CHANGELOG.md") - if _, err := os.Stat(changelog); err == nil { - info.Changelog = changelog - info.HasDocs = true - } - - // Recursively scan docs/ directory for .md files - docsDir := filepath.Join(repo.Path, "docs") - if _, err := os.Stat(docsDir); err == nil { - filepath.WalkDir(docsDir, func(path string, d fs.DirEntry, err 
error) error { - if err != nil { - return nil - } - // Skip plans/ directory - if d.IsDir() && d.Name() == "plans" { - return filepath.SkipDir - } - // Skip non-markdown files - if d.IsDir() || !strings.HasSuffix(d.Name(), ".md") { - return nil - } - // Get relative path from docs/ - relPath, _ := filepath.Rel(docsDir, path) - info.DocsFiles = append(info.DocsFiles, relPath) - info.HasDocs = true - return nil - }) - } - - return info -} - -func copyFile(src, dst string) error { - data, err := os.ReadFile(src) - if err != nil { - return err - } - return os.WriteFile(dst, data, 0644) -} diff --git a/pkg/docs/cmd_sync.go b/pkg/docs/cmd_sync.go deleted file mode 100644 index de9e731..0000000 --- a/pkg/docs/cmd_sync.go +++ /dev/null @@ -1,155 +0,0 @@ -package docs - -import ( - "os" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -// Flag variables for sync command -var ( - docsSyncRegistryPath string - docsSyncDryRun bool - docsSyncOutputDir string -) - -var docsSyncCmd = &cli.Command{ - Use: "sync", - Short: i18n.T("cmd.docs.sync.short"), - Long: i18n.T("cmd.docs.sync.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runDocsSync(docsSyncRegistryPath, docsSyncOutputDir, docsSyncDryRun) - }, -} - -func init() { - docsSyncCmd.Flags().StringVar(&docsSyncRegistryPath, "registry", "", i18n.T("common.flag.registry")) - docsSyncCmd.Flags().BoolVar(&docsSyncDryRun, "dry-run", false, i18n.T("cmd.docs.sync.flag.dry_run")) - docsSyncCmd.Flags().StringVar(&docsSyncOutputDir, "output", "", i18n.T("cmd.docs.sync.flag.output")) -} - -// packageOutputName maps repo name to output folder name -func packageOutputName(repoName string) string { - // core -> go (the Go framework) - if repoName == "core" { - return "go" - } - // core-admin -> admin, core-api -> api, etc. 
- if strings.HasPrefix(repoName, "core-") { - return strings.TrimPrefix(repoName, "core-") - } - return repoName -} - -// shouldSyncRepo returns true if this repo should be synced -func shouldSyncRepo(repoName string) bool { - // Skip core-php (it's the destination) - if repoName == "core-php" { - return false - } - // Skip template - if repoName == "core-template" { - return false - } - return true -} - -func runDocsSync(registryPath string, outputDir string, dryRun bool) error { - // Find or use provided registry - reg, basePath, err := loadRegistry(registryPath) - if err != nil { - return err - } - - // Default output to core-php/docs/packages relative to registry - if outputDir == "" { - outputDir = filepath.Join(basePath, "core-php", "docs", "packages") - } - - // Scan all repos for docs - var docsInfo []RepoDocInfo - for _, repo := range reg.List() { - if !shouldSyncRepo(repo.Name) { - continue - } - info := scanRepoDocs(repo) - if info.HasDocs && len(info.DocsFiles) > 0 { - docsInfo = append(docsInfo, info) - } - } - - if len(docsInfo) == 0 { - cli.Text(i18n.T("cmd.docs.sync.no_docs_found")) - return nil - } - - cli.Print("\n%s %s\n\n", dimStyle.Render(i18n.T("cmd.docs.sync.found_label")), i18n.T("cmd.docs.sync.repos_with_docs", map[string]interface{}{"Count": len(docsInfo)})) - - // Show what will be synced - var totalFiles int - for _, info := range docsInfo { - totalFiles += len(info.DocsFiles) - outName := packageOutputName(info.Name) - cli.Print(" %s → %s %s\n", - repoNameStyle.Render(info.Name), - docsFileStyle.Render("packages/"+outName+"/"), - dimStyle.Render(i18n.T("cmd.docs.sync.files_count", map[string]interface{}{"Count": len(info.DocsFiles)}))) - - for _, f := range info.DocsFiles { - cli.Print(" %s\n", dimStyle.Render(f)) - } - } - - cli.Print("\n%s %s\n", - dimStyle.Render(i18n.Label("total")), - i18n.T("cmd.docs.sync.total_summary", map[string]interface{}{"Files": totalFiles, "Repos": len(docsInfo), "Output": outputDir})) - - if dryRun { - 
cli.Print("\n%s\n", dimStyle.Render(i18n.T("cmd.docs.sync.dry_run_notice"))) - return nil - } - - // Confirm - cli.Blank() - if !confirm(i18n.T("cmd.docs.sync.confirm")) { - cli.Text(i18n.T("common.prompt.abort")) - return nil - } - - // Sync docs - cli.Blank() - var synced int - for _, info := range docsInfo { - outName := packageOutputName(info.Name) - repoOutDir := filepath.Join(outputDir, outName) - - // Clear existing directory - os.RemoveAll(repoOutDir) - - if err := os.MkdirAll(repoOutDir, 0755); err != nil { - cli.Print(" %s %s: %s\n", errorStyle.Render("✗"), info.Name, err) - continue - } - - // Copy all docs files - docsDir := filepath.Join(info.Path, "docs") - for _, f := range info.DocsFiles { - src := filepath.Join(docsDir, f) - dst := filepath.Join(repoOutDir, f) - os.MkdirAll(filepath.Dir(dst), 0755) - if err := copyFile(src, dst); err != nil { - cli.Print(" %s %s: %s\n", errorStyle.Render("✗"), f, err) - } - } - - cli.Print(" %s %s → packages/%s/\n", successStyle.Render("✓"), info.Name, outName) - synced++ - } - - cli.Print("\n%s %s\n", successStyle.Render(i18n.T("i18n.done.sync")), i18n.T("cmd.docs.sync.synced_packages", map[string]interface{}{"Count": synced})) - - return nil -} \ No newline at end of file diff --git a/pkg/doctor/cmd_checks.go b/pkg/doctor/cmd_checks.go deleted file mode 100644 index fee8dbb..0000000 --- a/pkg/doctor/cmd_checks.go +++ /dev/null @@ -1,101 +0,0 @@ -package doctor - -import ( - "os/exec" - "strings" - - "github.com/host-uk/core/pkg/i18n" -) - -// check represents a tool check configuration -type check struct { - name string - description string - command string - args []string - versionFlag string -} - -// requiredChecks returns tools that must be installed -func requiredChecks() []check { - return []check{ - { - name: i18n.T("cmd.doctor.check.git.name"), - description: i18n.T("cmd.doctor.check.git.description"), - command: "git", - args: []string{"--version"}, - versionFlag: "--version", - }, - { - name: 
i18n.T("cmd.doctor.check.gh.name"), - description: i18n.T("cmd.doctor.check.gh.description"), - command: "gh", - args: []string{"--version"}, - versionFlag: "--version", - }, - { - name: i18n.T("cmd.doctor.check.php.name"), - description: i18n.T("cmd.doctor.check.php.description"), - command: "php", - args: []string{"-v"}, - versionFlag: "-v", - }, - { - name: i18n.T("cmd.doctor.check.composer.name"), - description: i18n.T("cmd.doctor.check.composer.description"), - command: "composer", - args: []string{"--version"}, - versionFlag: "--version", - }, - { - name: i18n.T("cmd.doctor.check.node.name"), - description: i18n.T("cmd.doctor.check.node.description"), - command: "node", - args: []string{"--version"}, - versionFlag: "--version", - }, - } -} - -// optionalChecks returns tools that are nice to have -func optionalChecks() []check { - return []check{ - { - name: i18n.T("cmd.doctor.check.pnpm.name"), - description: i18n.T("cmd.doctor.check.pnpm.description"), - command: "pnpm", - args: []string{"--version"}, - versionFlag: "--version", - }, - { - name: i18n.T("cmd.doctor.check.claude.name"), - description: i18n.T("cmd.doctor.check.claude.description"), - command: "claude", - args: []string{"--version"}, - versionFlag: "--version", - }, - { - name: i18n.T("cmd.doctor.check.docker.name"), - description: i18n.T("cmd.doctor.check.docker.description"), - command: "docker", - args: []string{"--version"}, - versionFlag: "--version", - }, - } -} - -// runCheck executes a tool check and returns success status and version info -func runCheck(c check) (bool, string) { - cmd := exec.Command(c.command, c.args...) 
- output, err := cmd.CombinedOutput() - if err != nil { - return false, "" - } - - // Extract first line as version - lines := strings.Split(strings.TrimSpace(string(output)), "\n") - if len(lines) > 0 { - return true, strings.TrimSpace(lines[0]) - } - return true, "" -} diff --git a/pkg/doctor/cmd_commands.go b/pkg/doctor/cmd_commands.go deleted file mode 100644 index 91c8efa..0000000 --- a/pkg/doctor/cmd_commands.go +++ /dev/null @@ -1,25 +0,0 @@ -// Package doctor provides environment validation commands. -// -// Checks for: -// - Required tools: git, gh, php, composer, node -// - Optional tools: pnpm, claude, docker -// - GitHub access: SSH keys and CLI authentication -// - Workspace: repos.yaml presence and clone status -// -// Run before 'core setup' to ensure your environment is ready. -// Provides platform-specific installation instructions for missing tools. -package doctor - -import ( - "github.com/host-uk/core/pkg/cli" - "github.com/spf13/cobra" -) - -func init() { - cli.RegisterCommands(AddDoctorCommands) -} - -// AddDoctorCommands registers the 'doctor' command and all subcommands. -func AddDoctorCommands(root *cobra.Command) { - root.AddCommand(doctorCmd) -} diff --git a/pkg/doctor/cmd_doctor.go b/pkg/doctor/cmd_doctor.go deleted file mode 100644 index ce38e53..0000000 --- a/pkg/doctor/cmd_doctor.go +++ /dev/null @@ -1,121 +0,0 @@ -// Package doctor provides environment check commands. 
-package doctor - -import ( - "fmt" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/spf13/cobra" -) - -// Style aliases from shared -var ( - successStyle = cli.SuccessStyle - errorStyle = cli.ErrorStyle - dimStyle = cli.DimStyle -) - -// Flag variable for doctor command -var doctorVerbose bool - -var doctorCmd = &cobra.Command{ - Use: "doctor", - Short: i18n.T("cmd.doctor.short"), - Long: i18n.T("cmd.doctor.long"), - RunE: func(cmd *cobra.Command, args []string) error { - return runDoctor(doctorVerbose) - }, -} - -func init() { - doctorCmd.Flags().BoolVar(&doctorVerbose, "verbose", false, i18n.T("cmd.doctor.verbose_flag")) -} - -func runDoctor(verbose bool) error { - fmt.Println(i18n.T("common.progress.checking", map[string]any{"Item": "development environment"})) - fmt.Println() - - var passed, failed, optional int - - // Check required tools - fmt.Println(i18n.T("cmd.doctor.required")) - for _, c := range requiredChecks() { - ok, version := runCheck(c) - if ok { - if verbose { - fmt.Println(formatCheckResult(true, c.name, version)) - } else { - fmt.Println(formatCheckResult(true, c.name, "")) - } - passed++ - } else { - fmt.Printf(" %s %s - %s\n", errorStyle.Render(cli.Glyph(":cross:")), c.name, c.description) - failed++ - } - } - - // Check optional tools - fmt.Printf("\n%s\n", i18n.T("cmd.doctor.optional")) - for _, c := range optionalChecks() { - ok, version := runCheck(c) - if ok { - if verbose { - fmt.Println(formatCheckResult(true, c.name, version)) - } else { - fmt.Println(formatCheckResult(true, c.name, "")) - } - passed++ - } else { - fmt.Printf(" %s %s - %s\n", dimStyle.Render(cli.Glyph(":skip:")), c.name, dimStyle.Render(c.description)) - optional++ - } - } - - // Check GitHub access - fmt.Printf("\n%s\n", i18n.T("cmd.doctor.github")) - if checkGitHubSSH() { - fmt.Println(formatCheckResult(true, i18n.T("cmd.doctor.ssh_found"), "")) - } else { - fmt.Printf(" %s %s\n", errorStyle.Render(cli.Glyph(":cross:")), 
i18n.T("cmd.doctor.ssh_missing")) - failed++ - } - - if checkGitHubCLI() { - fmt.Println(formatCheckResult(true, i18n.T("cmd.doctor.cli_auth"), "")) - } else { - fmt.Printf(" %s %s\n", errorStyle.Render(cli.Glyph(":cross:")), i18n.T("cmd.doctor.cli_auth_missing")) - failed++ - } - - // Check workspace - fmt.Printf("\n%s\n", i18n.T("cmd.doctor.workspace")) - checkWorkspace() - - // Summary - fmt.Println() - if failed > 0 { - cli.Error(i18n.T("cmd.doctor.issues", map[string]interface{}{"Count": failed})) - fmt.Printf("\n%s\n", i18n.T("cmd.doctor.install_missing")) - printInstallInstructions() - return fmt.Errorf("%s", i18n.T("cmd.doctor.issues_error", map[string]interface{}{"Count": failed})) - } - - cli.Success(i18n.T("cmd.doctor.ready")) - return nil -} - -func formatCheckResult(ok bool, name, detail string) string { - check := cli.Check(name) - if ok { - check.Pass() - } else { - check.Fail() - } - if detail != "" { - check.Message(detail) - } else { - check.Message("") - } - return check.String() -} diff --git a/pkg/doctor/cmd_environment.go b/pkg/doctor/cmd_environment.go deleted file mode 100644 index 2e8ea28..0000000 --- a/pkg/doctor/cmd_environment.go +++ /dev/null @@ -1,78 +0,0 @@ -package doctor - -import ( - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/repos" -) - -// checkGitHubSSH checks if SSH keys exist for GitHub access -func checkGitHubSSH() bool { - // Just check if SSH keys exist - don't try to authenticate - // (key might be locked/passphrase protected) - home, err := os.UserHomeDir() - if err != nil { - return false - } - - sshDir := filepath.Join(home, ".ssh") - keyPatterns := []string{"id_rsa", "id_ed25519", "id_ecdsa", "id_dsa"} - - for _, key := range keyPatterns { - keyPath := filepath.Join(sshDir, key) - if _, err := os.Stat(keyPath); err == nil { - return true - } - } - - return false -} - -// checkGitHubCLI checks if the GitHub CLI is authenticated -func 
checkGitHubCLI() bool { - cmd := exec.Command("gh", "auth", "status") - output, _ := cmd.CombinedOutput() - // Check for any successful login (even if there's also a failing token) - return strings.Contains(string(output), "Logged in to") -} - -// checkWorkspace checks for repos.yaml and counts cloned repos -func checkWorkspace() { - registryPath, err := repos.FindRegistry() - if err == nil { - fmt.Printf(" %s %s\n", successStyle.Render("✓"), i18n.T("cmd.doctor.repos_yaml_found", map[string]interface{}{"Path": registryPath})) - - reg, err := repos.LoadRegistry(registryPath) - if err == nil { - basePath := reg.BasePath - if basePath == "" { - basePath = "./packages" - } - if !filepath.IsAbs(basePath) { - basePath = filepath.Join(filepath.Dir(registryPath), basePath) - } - if strings.HasPrefix(basePath, "~/") { - home, _ := os.UserHomeDir() - basePath = filepath.Join(home, basePath[2:]) - } - - // Count existing repos - allRepos := reg.List() - var cloned int - for _, repo := range allRepos { - repoPath := filepath.Join(basePath, repo.Name) - if _, err := os.Stat(filepath.Join(repoPath, ".git")); err == nil { - cloned++ - } - } - fmt.Printf(" %s %s\n", successStyle.Render("✓"), i18n.T("cmd.doctor.repos_cloned", map[string]interface{}{"Cloned": cloned, "Total": len(allRepos)})) - } - } else { - fmt.Printf(" %s %s\n", dimStyle.Render("○"), i18n.T("cmd.doctor.no_repos_yaml")) - } -} diff --git a/pkg/doctor/cmd_install.go b/pkg/doctor/cmd_install.go deleted file mode 100644 index ade4c50..0000000 --- a/pkg/doctor/cmd_install.go +++ /dev/null @@ -1,26 +0,0 @@ -package doctor - -import ( - "fmt" - "runtime" - - "github.com/host-uk/core/pkg/i18n" -) - -// printInstallInstructions prints OS-specific installation instructions -func printInstallInstructions() { - switch runtime.GOOS { - case "darwin": - fmt.Printf(" %s\n", i18n.T("cmd.doctor.install_macos")) - fmt.Printf(" %s\n", i18n.T("cmd.doctor.install_macos_cask")) - case "linux": - fmt.Printf(" %s\n", 
i18n.T("cmd.doctor.install_linux_header")) - fmt.Printf(" %s\n", i18n.T("cmd.doctor.install_linux_git")) - fmt.Printf(" %s\n", i18n.T("cmd.doctor.install_linux_gh")) - fmt.Printf(" %s\n", i18n.T("cmd.doctor.install_linux_php")) - fmt.Printf(" %s\n", i18n.T("cmd.doctor.install_linux_node")) - fmt.Printf(" %s\n", i18n.T("cmd.doctor.install_linux_pnpm")) - default: - fmt.Printf(" %s\n", i18n.T("cmd.doctor.install_other")) - } -} diff --git a/pkg/errors/errors.go b/pkg/errors/errors.go deleted file mode 100644 index 19741d1..0000000 --- a/pkg/errors/errors.go +++ /dev/null @@ -1,151 +0,0 @@ -// Package errors provides structured error handling for Core applications. -// -// Errors include operational context (what was being done) and support -// error wrapping for debugging while keeping user-facing messages clean: -// -// err := errors.E("user.Create", "email already exists", nil) -// err := errors.Wrap(dbErr, "user.Create", "failed to save user") -// -// // Check error types -// if errors.Is(err, sql.ErrNoRows) { ... } -// -// // Extract operation -// var e *errors.Error -// if errors.As(err, &e) { -// fmt.Println("Operation:", e.Op) -// } -package errors - -import ( - stderrors "errors" - "fmt" -) - -// Error represents a structured error with operational context. -type Error struct { - Op string // Operation being performed (e.g., "user.Create") - Msg string // Human-readable message - Err error // Underlying error (optional) - Code string // Error code for i18n/categorisation (optional) -} - -// E creates a new Error with operation context. -// -// err := errors.E("config.Load", "file not found", os.ErrNotExist) -// err := errors.E("api.Call", "rate limited", nil) -func E(op, msg string, err error) error { - return &Error{Op: op, Msg: msg, Err: err} -} - -// Wrap wraps an error with operation context. -// Returns nil if err is nil. 
-// -// return errors.Wrap(err, "db.Query", "failed to fetch user") -func Wrap(err error, op, msg string) error { - if err == nil { - return nil - } - return &Error{Op: op, Msg: msg, Err: err} -} - -// WrapCode wraps an error with operation context and an error code. -// -// return errors.WrapCode(err, "ERR_NOT_FOUND", "user.Get", "user not found") -func WrapCode(err error, code, op, msg string) error { - if err == nil && code == "" { - return nil - } - return &Error{Op: op, Msg: msg, Err: err, Code: code} -} - -// Code creates an error with just a code and message. -// -// return errors.Code("ERR_VALIDATION", "invalid email format") -func Code(code, msg string) error { - return &Error{Code: code, Msg: msg} -} - -// Error returns the error message. -func (e *Error) Error() string { - if e.Op != "" && e.Err != nil { - return fmt.Sprintf("%s: %s: %v", e.Op, e.Msg, e.Err) - } - if e.Op != "" { - return fmt.Sprintf("%s: %s", e.Op, e.Msg) - } - if e.Err != nil { - return fmt.Sprintf("%s: %v", e.Msg, e.Err) - } - return e.Msg -} - -// Unwrap returns the underlying error. -func (e *Error) Unwrap() error { - return e.Err -} - -// --- Standard library wrappers --- - -// Is reports whether any error in err's tree matches target. -func Is(err, target error) bool { - return stderrors.Is(err, target) -} - -// As finds the first error in err's tree that matches target. -func As(err error, target any) bool { - return stderrors.As(err, target) -} - -// New returns an error with the given text. -func New(text string) error { - return stderrors.New(text) -} - -// Join returns an error that wraps the given errors. -func Join(errs ...error) error { - return stderrors.Join(errs...) -} - -// --- Helper functions --- - -// Op extracts the operation from an error, or empty string if not an Error. -func Op(err error) string { - var e *Error - if As(err, &e) { - return e.Op - } - return "" -} - -// ErrCode extracts the error code, or empty string if not set. 
-func ErrCode(err error) string { - var e *Error - if As(err, &e) { - return e.Code - } - return "" -} - -// Message extracts the message from an error. -// For Error types, returns Msg; otherwise returns err.Error(). -func Message(err error) string { - if err == nil { - return "" - } - var e *Error - if As(err, &e) { - return e.Msg - } - return err.Error() -} - -// Root returns the deepest error in the chain. -func Root(err error) error { - for { - unwrapped := stderrors.Unwrap(err) - if unwrapped == nil { - return err - } - err = unwrapped - } -} diff --git a/pkg/errors/errors_test.go b/pkg/errors/errors_test.go deleted file mode 100644 index 383c3c3..0000000 --- a/pkg/errors/errors_test.go +++ /dev/null @@ -1,182 +0,0 @@ -package errors - -import ( - "io" - "testing" -) - -func TestE(t *testing.T) { - err := E("user.Create", "validation failed", nil) - - if err.Error() != "user.Create: validation failed" { - t.Errorf("unexpected error message: %s", err.Error()) - } -} - -func TestE_WithUnderlying(t *testing.T) { - underlying := New("database connection failed") - err := E("user.Create", "failed to save", underlying) - - if err.Error() != "user.Create: failed to save: database connection failed" { - t.Errorf("unexpected error message: %s", err.Error()) - } -} - -func TestWrap(t *testing.T) { - // Wrap nil returns nil - if Wrap(nil, "op", "msg") != nil { - t.Error("expected Wrap(nil) to return nil") - } - - // Wrap error - underlying := New("original") - err := Wrap(underlying, "user.Get", "failed") - - if !Is(err, underlying) { - t.Error("expected wrapped error to match underlying") - } -} - -func TestWrapCode(t *testing.T) { - underlying := New("not found") - err := WrapCode(underlying, "ERR_NOT_FOUND", "user.Get", "user not found") - - var e *Error - if !As(err, &e) { - t.Fatal("expected error to be *Error") - } - - if e.Code != "ERR_NOT_FOUND" { - t.Errorf("expected code ERR_NOT_FOUND, got %s", e.Code) - } -} - -func TestCode(t *testing.T) { - err := 
Code("ERR_VALIDATION", "invalid email") - - var e *Error - if !As(err, &e) { - t.Fatal("expected error to be *Error") - } - - if e.Code != "ERR_VALIDATION" { - t.Errorf("expected code ERR_VALIDATION, got %s", e.Code) - } - if e.Msg != "invalid email" { - t.Errorf("expected msg 'invalid email', got %s", e.Msg) - } -} - -func TestIs(t *testing.T) { - err := Wrap(io.EOF, "read", "failed") - - if !Is(err, io.EOF) { - t.Error("expected Is to find io.EOF in chain") - } - - if Is(err, io.ErrClosedPipe) { - t.Error("expected Is to not find io.ErrClosedPipe") - } -} - -func TestAs(t *testing.T) { - err := E("test.Op", "test message", nil) - - var e *Error - if !As(err, &e) { - t.Fatal("expected As to find *Error") - } - - if e.Op != "test.Op" { - t.Errorf("expected Op 'test.Op', got %s", e.Op) - } -} - -func TestOp(t *testing.T) { - err := E("user.Create", "failed", nil) - - if Op(err) != "user.Create" { - t.Errorf("expected Op 'user.Create', got %s", Op(err)) - } - - // Non-Error returns empty string - if Op(New("plain error")) != "" { - t.Error("expected empty Op for non-Error") - } -} - -func TestErrCode(t *testing.T) { - err := Code("ERR_TEST", "test") - - if ErrCode(err) != "ERR_TEST" { - t.Errorf("expected code ERR_TEST, got %s", ErrCode(err)) - } - - // Non-Error returns empty string - if ErrCode(New("plain error")) != "" { - t.Error("expected empty code for non-Error") - } -} - -func TestMessage(t *testing.T) { - err := E("op", "the message", nil) - - if Message(err) != "the message" { - t.Errorf("expected 'the message', got %s", Message(err)) - } - - // Plain error returns full error string - plain := New("plain error") - if Message(plain) != "plain error" { - t.Errorf("expected 'plain error', got %s", Message(plain)) - } - - // Nil returns empty string - if Message(nil) != "" { - t.Error("expected empty string for nil") - } -} - -func TestRoot(t *testing.T) { - root := New("root cause") - mid := Wrap(root, "mid", "middle") - top := Wrap(mid, "top", "top level") - 
- if Root(top) != root { - t.Error("expected Root to return deepest error") - } - - // Single error returns itself - single := New("single") - if Root(single) != single { - t.Error("expected Root of single error to return itself") - } -} - -func TestError_Unwrap(t *testing.T) { - underlying := New("underlying") - err := E("op", "msg", underlying) - - var e *Error - if !As(err, &e) { - t.Fatal("expected *Error") - } - - if e.Unwrap() != underlying { - t.Error("expected Unwrap to return underlying error") - } -} - -func TestJoin(t *testing.T) { - err1 := New("error 1") - err2 := New("error 2") - - joined := Join(err1, err2) - - if !Is(joined, err1) { - t.Error("expected joined error to contain err1") - } - if !Is(joined, err2) { - t.Error("expected joined error to contain err2") - } -} diff --git a/pkg/framework/core/core.go b/pkg/framework/core/core.go deleted file mode 100644 index ade5b94..0000000 --- a/pkg/framework/core/core.go +++ /dev/null @@ -1,364 +0,0 @@ -package core - -import ( - "context" - "embed" - "errors" - "fmt" - "reflect" - "strings" -) - -// New initialises a Core instance using the provided options and performs the necessary setup. -// It is the primary entry point for creating a new Core application. -// -// Example: -// -// core, err := core.New( -// core.WithService(&MyService{}), -// core.WithAssets(assets), -// ) -func New(opts ...Option) (*Core, error) { - c := &Core{ - services: make(map[string]any), - Features: &Features{}, - } - for _, o := range opts { - if err := o(c); err != nil { - return nil, err - } - } - - if c.serviceLock { - c.servicesLocked = true - } - return c, nil -} - -// WithService creates an Option that registers a service. It automatically discovers -// the service name from its package path and registers its IPC handler if it -// implements a method named `HandleIPCEvents`. 
-// -// Example: -// -// // In myapp/services/calculator.go -// package services -// -// type Calculator struct{} -// -// func (s *Calculator) Add(a, b int) int { return a + b } -// -// // In main.go -// import "myapp/services" -// -// core.New(core.WithService(services.NewCalculator)) -func WithService(factory func(*Core) (any, error)) Option { - return func(c *Core) error { - serviceInstance, err := factory(c) - - if err != nil { - return fmt.Errorf("core: failed to create service: %w", err) - } - - // --- Service Name Discovery --- - typeOfService := reflect.TypeOf(serviceInstance) - if typeOfService.Kind() == reflect.Ptr { - typeOfService = typeOfService.Elem() - } - pkgPath := typeOfService.PkgPath() - parts := strings.Split(pkgPath, "/") - name := strings.ToLower(parts[len(parts)-1]) - - // --- IPC Handler Discovery --- - instanceValue := reflect.ValueOf(serviceInstance) - handlerMethod := instanceValue.MethodByName("HandleIPCEvents") - if handlerMethod.IsValid() { - if handler, ok := handlerMethod.Interface().(func(*Core, Message) error); ok { - c.RegisterAction(handler) - } - } - - return c.RegisterService(name, serviceInstance) - } -} - -// WithName creates an option that registers a service with a specific name. -// This is useful when the service name cannot be inferred from the package path, -// such as when using anonymous functions as factories. -// Note: Unlike WithService, this does not automatically discover or register -// IPC handlers. If your service needs IPC handling, implement HandleIPCEvents -// and register it manually. -func WithName(name string, factory func(*Core) (any, error)) Option { - return func(c *Core) error { - serviceInstance, err := factory(c) - if err != nil { - return fmt.Errorf("core: failed to create service '%s': %w", name, err) - } - return c.RegisterService(name, serviceInstance) - } -} - -// WithApp creates an Option that injects the GUI runtime (e.g., Wails App) into the Core. 
-// This is essential for services that need to interact with the GUI runtime. -func WithApp(app any) Option { - return func(c *Core) error { - c.App = app - return nil - } -} - -// WithAssets creates an Option that registers the application's embedded assets. -// This is necessary for the application to be able to serve its frontend. -func WithAssets(fs embed.FS) Option { - return func(c *Core) error { - c.assets = fs - return nil - } -} - -// WithServiceLock creates an Option that prevents any further services from being -// registered after the Core has been initialized. This is a security measure to -// prevent late-binding of services that could have unintended consequences. -func WithServiceLock() Option { - return func(c *Core) error { - c.serviceLock = true - return nil - } -} - -// --- Core Methods --- - -// ServiceStartup is the entry point for the Core service's startup lifecycle. -// It is called by the GUI runtime when the application starts. -func (c *Core) ServiceStartup(ctx context.Context, options any) error { - c.serviceMu.RLock() - startables := append([]Startable(nil), c.startables...) - c.serviceMu.RUnlock() - - var agg error - for _, s := range startables { - if err := s.OnStartup(ctx); err != nil { - agg = errors.Join(agg, err) - } - } - - if err := c.ACTION(ActionServiceStartup{}); err != nil { - agg = errors.Join(agg, err) - } - - return agg -} - -// ServiceShutdown is the entry point for the Core service's shutdown lifecycle. -// It is called by the GUI runtime when the application shuts down. -func (c *Core) ServiceShutdown(ctx context.Context) error { - var agg error - if err := c.ACTION(ActionServiceShutdown{}); err != nil { - agg = errors.Join(agg, err) - } - - c.serviceMu.RLock() - stoppables := append([]Stoppable(nil), c.stoppables...) 
- c.serviceMu.RUnlock() - - for i := len(stoppables) - 1; i >= 0; i-- { - if err := stoppables[i].OnShutdown(ctx); err != nil { - agg = errors.Join(agg, err) - } - } - - return agg -} - -// ACTION dispatches a message to all registered IPC handlers. -// This is the primary mechanism for services to communicate with each other. -func (c *Core) ACTION(msg Message) error { - c.ipcMu.RLock() - handlers := append([]func(*Core, Message) error(nil), c.ipcHandlers...) - c.ipcMu.RUnlock() - - var agg error - for _, h := range handlers { - if err := h(c, msg); err != nil { - agg = fmt.Errorf("%w; %v", agg, err) - } - } - return agg -} - -// RegisterAction adds a new IPC handler to the Core. -func (c *Core) RegisterAction(handler func(*Core, Message) error) { - c.ipcMu.Lock() - c.ipcHandlers = append(c.ipcHandlers, handler) - c.ipcMu.Unlock() -} - -// RegisterActions adds multiple IPC handlers to the Core. -func (c *Core) RegisterActions(handlers ...func(*Core, Message) error) { - c.ipcMu.Lock() - c.ipcHandlers = append(c.ipcHandlers, handlers...) - c.ipcMu.Unlock() -} - -// QUERY dispatches a query to handlers until one responds. -// Returns (result, handled, error). If no handler responds, handled is false. -func (c *Core) QUERY(q Query) (any, bool, error) { - c.queryMu.RLock() - handlers := append([]QueryHandler(nil), c.queryHandlers...) - c.queryMu.RUnlock() - - for _, h := range handlers { - result, handled, err := h(c, q) - if handled { - return result, true, err - } - } - return nil, false, nil -} - -// QUERYALL dispatches a query to all handlers and collects all responses. -// Returns all results from handlers that responded. -func (c *Core) QUERYALL(q Query) ([]any, error) { - c.queryMu.RLock() - handlers := append([]QueryHandler(nil), c.queryHandlers...) 
- c.queryMu.RUnlock() - - var results []any - var agg error - for _, h := range handlers { - result, handled, err := h(c, q) - if err != nil { - agg = errors.Join(agg, err) - } - if handled && result != nil { - results = append(results, result) - } - } - return results, agg -} - -// PERFORM dispatches a task to handlers until one executes it. -// Returns (result, handled, error). If no handler responds, handled is false. -func (c *Core) PERFORM(t Task) (any, bool, error) { - c.taskMu.RLock() - handlers := append([]TaskHandler(nil), c.taskHandlers...) - c.taskMu.RUnlock() - - for _, h := range handlers { - result, handled, err := h(c, t) - if handled { - return result, true, err - } - } - return nil, false, nil -} - -// RegisterQuery adds a query handler to the Core. -func (c *Core) RegisterQuery(handler QueryHandler) { - c.queryMu.Lock() - c.queryHandlers = append(c.queryHandlers, handler) - c.queryMu.Unlock() -} - -// RegisterTask adds a task handler to the Core. -func (c *Core) RegisterTask(handler TaskHandler) { - c.taskMu.Lock() - c.taskHandlers = append(c.taskHandlers, handler) - c.taskMu.Unlock() -} - -// RegisterService adds a new service to the Core. -func (c *Core) RegisterService(name string, api any) error { - if c.servicesLocked { - return fmt.Errorf("core: service %q is not permitted by the serviceLock setting", name) - } - if name == "" { - return errors.New("core: service name cannot be empty") - } - c.serviceMu.Lock() - defer c.serviceMu.Unlock() - if _, exists := c.services[name]; exists { - return fmt.Errorf("core: service %q already registered", name) - } - c.services[name] = api - - if s, ok := api.(Startable); ok { - c.startables = append(c.startables, s) - } - if s, ok := api.(Stoppable); ok { - c.stoppables = append(c.stoppables, s) - } - - return nil -} - -// Service retrieves a registered service by name. -// It returns nil if the service is not found. 
-func (c *Core) Service(name string) any { - c.serviceMu.RLock() - api, ok := c.services[name] - c.serviceMu.RUnlock() - if !ok { - return nil - } - return api -} - -// ServiceFor retrieves a registered service by name and asserts its type to the given interface T. -func ServiceFor[T any](c *Core, name string) (T, error) { - var zero T - raw := c.Service(name) - if raw == nil { - return zero, fmt.Errorf("service '%s' not found", name) - } - typed, ok := raw.(T) - if !ok { - return zero, fmt.Errorf("service '%s' is of type %T, but expected %T", name, raw, zero) - } - return typed, nil -} - -// MustServiceFor retrieves a registered service by name and asserts its type to the given interface T. -// It panics if the service is not found or cannot be cast to T. -func MustServiceFor[T any](c *Core, name string) T { - svc, err := ServiceFor[T](c, name) - if err != nil { - panic(err) - } - return svc -} - -// App returns the global application instance. -// It panics if the Core has not been initialized via SetInstance. -// This is typically used by GUI runtimes that need global access. -func App() any { - if instance == nil { - panic("core.App() called before core.SetInstance()") - } - return instance.App -} - -// SetInstance sets the global Core instance for App() access. -// This is typically called by GUI runtimes during initialization. -func SetInstance(c *Core) { - instance = c -} - -// Config returns the registered Config service. -func (c *Core) Config() Config { - cfg := MustServiceFor[Config](c, "config") - return cfg -} - -// Display returns the registered Display service. -func (c *Core) Display() Display { - d := MustServiceFor[Display](c, "display") - return d -} - -func (c *Core) Core() *Core { return c } - -// Assets returns the embedded filesystem containing the application's assets. 
-func (c *Core) Assets() embed.FS { - return c.assets -} diff --git a/pkg/framework/core/core_extra_test.go b/pkg/framework/core/core_extra_test.go deleted file mode 100644 index 38da57f..0000000 --- a/pkg/framework/core/core_extra_test.go +++ /dev/null @@ -1,43 +0,0 @@ -package core - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -type MockServiceWithIPC struct { - MockService - handled bool -} - -func (m *MockServiceWithIPC) HandleIPCEvents(c *Core, msg Message) error { - m.handled = true - return nil -} - -func TestCore_WithService_IPC(t *testing.T) { - svc := &MockServiceWithIPC{MockService: MockService{Name: "ipc-service"}} - factory := func(c *Core) (any, error) { - return svc, nil - } - c, err := New(WithService(factory)) - assert.NoError(t, err) - - // Trigger ACTION to verify handler was registered - err = c.ACTION(nil) - assert.NoError(t, err) - assert.True(t, svc.handled) -} - -func TestCore_ACTION_Bad(t *testing.T) { - c, err := New() - assert.NoError(t, err) - errHandler := func(c *Core, msg Message) error { - return assert.AnError - } - c.RegisterAction(errHandler) - err = c.ACTION(nil) - assert.Error(t, err) - assert.Contains(t, err.Error(), assert.AnError.Error()) -} diff --git a/pkg/framework/core/core_lifecycle_test.go b/pkg/framework/core/core_lifecycle_test.go deleted file mode 100644 index 3982a36..0000000 --- a/pkg/framework/core/core_lifecycle_test.go +++ /dev/null @@ -1,163 +0,0 @@ -package core - -import ( - "context" - "errors" - "testing" - - "github.com/stretchr/testify/assert" -) - -type MockStartable struct { - started bool - err error -} - -func (m *MockStartable) OnStartup(ctx context.Context) error { - m.started = true - return m.err -} - -type MockStoppable struct { - stopped bool - err error -} - -func (m *MockStoppable) OnShutdown(ctx context.Context) error { - m.stopped = true - return m.err -} - -type MockLifecycle struct { - MockStartable - MockStoppable -} - -func TestCore_LifecycleInterfaces(t 
*testing.T) { - c, err := New() - assert.NoError(t, err) - - startable := &MockStartable{} - stoppable := &MockStoppable{} - lifecycle := &MockLifecycle{} - - // Register services - err = c.RegisterService("startable", startable) - assert.NoError(t, err) - err = c.RegisterService("stoppable", stoppable) - assert.NoError(t, err) - err = c.RegisterService("lifecycle", lifecycle) - assert.NoError(t, err) - - // Startup - err = c.ServiceStartup(context.Background(), nil) - assert.NoError(t, err) - assert.True(t, startable.started) - assert.True(t, lifecycle.started) - assert.False(t, stoppable.stopped) - - // Shutdown - err = c.ServiceShutdown(context.Background()) - assert.NoError(t, err) - assert.True(t, stoppable.stopped) - assert.True(t, lifecycle.stopped) -} - -type MockLifecycleWithLog struct { - id string - log *[]string -} - -func (m *MockLifecycleWithLog) OnStartup(ctx context.Context) error { - *m.log = append(*m.log, "start-"+m.id) - return nil -} - -func (m *MockLifecycleWithLog) OnShutdown(ctx context.Context) error { - *m.log = append(*m.log, "stop-"+m.id) - return nil -} - -func TestCore_LifecycleOrder(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - var callOrder []string - - s1 := &MockLifecycleWithLog{id: "1", log: &callOrder} - s2 := &MockLifecycleWithLog{id: "2", log: &callOrder} - - err = c.RegisterService("s1", s1) - assert.NoError(t, err) - err = c.RegisterService("s2", s2) - assert.NoError(t, err) - - // Startup - err = c.ServiceStartup(context.Background(), nil) - assert.NoError(t, err) - assert.Equal(t, []string{"start-1", "start-2"}, callOrder) - - // Reset log - callOrder = nil - - // Shutdown - err = c.ServiceShutdown(context.Background()) - assert.NoError(t, err) - assert.Equal(t, []string{"stop-2", "stop-1"}, callOrder) -} - -func TestCore_LifecycleErrors(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - s1 := &MockStartable{err: assert.AnError} - s2 := &MockStoppable{err: assert.AnError} - - 
c.RegisterService("s1", s1) - c.RegisterService("s2", s2) - - err = c.ServiceStartup(context.Background(), nil) - assert.Error(t, err) - assert.ErrorIs(t, err, assert.AnError) - - err = c.ServiceShutdown(context.Background()) - assert.Error(t, err) - assert.ErrorIs(t, err, assert.AnError) -} - -func TestCore_LifecycleErrors_Aggregated(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - // Register action that fails - c.RegisterAction(func(c *Core, msg Message) error { - if _, ok := msg.(ActionServiceStartup); ok { - return errors.New("startup action error") - } - if _, ok := msg.(ActionServiceShutdown); ok { - return errors.New("shutdown action error") - } - return nil - }) - - // Register service that fails - s1 := &MockStartable{err: errors.New("startup service error")} - s2 := &MockStoppable{err: errors.New("shutdown service error")} - - err = c.RegisterService("s1", s1) - assert.NoError(t, err) - err = c.RegisterService("s2", s2) - assert.NoError(t, err) - - // Startup - err = c.ServiceStartup(context.Background(), nil) - assert.Error(t, err) - assert.Contains(t, err.Error(), "startup action error") - assert.Contains(t, err.Error(), "startup service error") - - // Shutdown - err = c.ServiceShutdown(context.Background()) - assert.Error(t, err) - assert.Contains(t, err.Error(), "shutdown action error") - assert.Contains(t, err.Error(), "shutdown service error") -} diff --git a/pkg/framework/core/core_test.go b/pkg/framework/core/core_test.go deleted file mode 100644 index 6dbdaec..0000000 --- a/pkg/framework/core/core_test.go +++ /dev/null @@ -1,297 +0,0 @@ -package core - -import ( - "embed" - "io" - "testing" - - "github.com/stretchr/testify/assert" -) - -// mockApp is a simple mock for testing app injection -type mockApp struct{} - -func TestCore_New_Good(t *testing.T) { - c, err := New() - assert.NoError(t, err) - assert.NotNil(t, c) -} - -// Mock service for testing -type MockService struct { - Name string -} - -func (m *MockService) GetName() 
string { - return m.Name -} - -func TestCore_WithService_Good(t *testing.T) { - factory := func(c *Core) (any, error) { - return &MockService{Name: "test"}, nil - } - c, err := New(WithService(factory)) - assert.NoError(t, err) - svc := c.Service("core") - assert.NotNil(t, svc) - mockSvc, ok := svc.(*MockService) - assert.True(t, ok) - assert.Equal(t, "test", mockSvc.GetName()) -} - -func TestCore_WithService_Bad(t *testing.T) { - factory := func(c *Core) (any, error) { - return nil, assert.AnError - } - _, err := New(WithService(factory)) - assert.Error(t, err) - assert.ErrorIs(t, err, assert.AnError) -} - -type MockConfigService struct{} - -func (m *MockConfigService) Get(key string, out any) error { return nil } -func (m *MockConfigService) Set(key string, v any) error { return nil } - -type MockDisplayService struct{} - -func (m *MockDisplayService) OpenWindow(opts ...WindowOption) error { return nil } - -func TestCore_Services_Good(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - err = c.RegisterService("config", &MockConfigService{}) - assert.NoError(t, err) - - err = c.RegisterService("display", &MockDisplayService{}) - assert.NoError(t, err) - - assert.NotNil(t, c.Config()) - assert.NotNil(t, c.Display()) -} - -func TestCore_Services_Ugly(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - assert.Panics(t, func() { - c.Config() - }) - assert.Panics(t, func() { - c.Display() - }) -} - -func TestCore_App_Good(t *testing.T) { - app := &mockApp{} - c, err := New(WithApp(app)) - assert.NoError(t, err) - - // To test the global App() function, we need to set the global instance. - originalInstance := instance - instance = c - defer func() { instance = originalInstance }() - - assert.Equal(t, app, App()) -} - -func TestCore_App_Ugly(t *testing.T) { - // This test ensures that calling App() before the core is initialized panics. 
- originalInstance := instance - instance = nil - defer func() { instance = originalInstance }() - assert.Panics(t, func() { - App() - }) -} - -func TestCore_Core_Good(t *testing.T) { - c, err := New() - assert.NoError(t, err) - assert.Equal(t, c, c.Core()) -} - -func TestFeatures_IsEnabled_Good(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - c.Features.Flags = []string{"feature1", "feature2"} - - assert.True(t, c.Features.IsEnabled("feature1")) - assert.True(t, c.Features.IsEnabled("feature2")) - assert.False(t, c.Features.IsEnabled("feature3")) -} - -type startupMessage struct{} -type shutdownMessage struct{} - -func TestCore_ServiceLifecycle_Good(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - var messageReceived Message - handler := func(c *Core, msg Message) error { - messageReceived = msg - return nil - } - c.RegisterAction(handler) - - // Test Startup - _ = c.ServiceStartup(nil, nil) - _, ok := messageReceived.(ActionServiceStartup) - assert.True(t, ok, "expected ActionServiceStartup message") - - // Test Shutdown - _ = c.ServiceShutdown(nil) - _, ok = messageReceived.(ActionServiceShutdown) - assert.True(t, ok, "expected ActionServiceShutdown message") -} - -func TestCore_WithApp_Good(t *testing.T) { - app := &mockApp{} - c, err := New(WithApp(app)) - assert.NoError(t, err) - assert.Equal(t, app, c.App) -} - -//go:embed testdata -var testFS embed.FS - -func TestCore_WithAssets_Good(t *testing.T) { - c, err := New(WithAssets(testFS)) - assert.NoError(t, err) - assets := c.Assets() - file, err := assets.Open("testdata/test.txt") - assert.NoError(t, err) - defer file.Close() - content, err := io.ReadAll(file) - assert.NoError(t, err) - assert.Equal(t, "hello from testdata\n", string(content)) -} - -func TestCore_WithServiceLock_Good(t *testing.T) { - c, err := New(WithServiceLock()) - assert.NoError(t, err) - err = c.RegisterService("test", &MockService{}) - assert.Error(t, err) -} - -func TestCore_RegisterService_Good(t 
*testing.T) { - c, err := New() - assert.NoError(t, err) - err = c.RegisterService("test", &MockService{Name: "test"}) - assert.NoError(t, err) - svc := c.Service("test") - assert.NotNil(t, svc) - mockSvc, ok := svc.(*MockService) - assert.True(t, ok) - assert.Equal(t, "test", mockSvc.GetName()) -} - -func TestCore_RegisterService_Bad(t *testing.T) { - c, err := New() - assert.NoError(t, err) - err = c.RegisterService("test", &MockService{}) - assert.NoError(t, err) - err = c.RegisterService("test", &MockService{}) - assert.Error(t, err) - err = c.RegisterService("", &MockService{}) - assert.Error(t, err) -} - -func TestCore_ServiceFor_Good(t *testing.T) { - c, err := New() - assert.NoError(t, err) - err = c.RegisterService("test", &MockService{Name: "test"}) - assert.NoError(t, err) - svc, err := ServiceFor[*MockService](c, "test") - assert.NoError(t, err) - assert.Equal(t, "test", svc.GetName()) -} - -func TestCore_ServiceFor_Bad(t *testing.T) { - c, err := New() - assert.NoError(t, err) - _, err = ServiceFor[*MockService](c, "nonexistent") - assert.Error(t, err) - err = c.RegisterService("test", "not a service") - assert.NoError(t, err) - _, err = ServiceFor[*MockService](c, "test") - assert.Error(t, err) -} - -func TestCore_MustServiceFor_Good(t *testing.T) { - c, err := New() - assert.NoError(t, err) - err = c.RegisterService("test", &MockService{Name: "test"}) - assert.NoError(t, err) - svc := MustServiceFor[*MockService](c, "test") - assert.Equal(t, "test", svc.GetName()) -} - -func TestCore_MustServiceFor_Ugly(t *testing.T) { - c, err := New() - assert.NoError(t, err) - assert.Panics(t, func() { - MustServiceFor[*MockService](c, "nonexistent") - }) - err = c.RegisterService("test", "not a service") - assert.NoError(t, err) - assert.Panics(t, func() { - MustServiceFor[*MockService](c, "test") - }) -} - -type MockAction struct { - handled bool -} - -func (a *MockAction) Handle(c *Core, msg Message) error { - a.handled = true - return nil -} - -func 
TestCore_ACTION_Good(t *testing.T) { - c, err := New() - assert.NoError(t, err) - action := &MockAction{} - c.RegisterAction(action.Handle) - err = c.ACTION(nil) - assert.NoError(t, err) - assert.True(t, action.handled) -} - -func TestCore_RegisterActions_Good(t *testing.T) { - c, err := New() - assert.NoError(t, err) - action1 := &MockAction{} - action2 := &MockAction{} - c.RegisterActions(action1.Handle, action2.Handle) - err = c.ACTION(nil) - assert.NoError(t, err) - assert.True(t, action1.handled) - assert.True(t, action2.handled) -} - -func TestCore_WithName_Good(t *testing.T) { - factory := func(c *Core) (any, error) { - return &MockService{Name: "test"}, nil - } - c, err := New(WithName("my-service", factory)) - assert.NoError(t, err) - svc := c.Service("my-service") - assert.NotNil(t, svc) - mockSvc, ok := svc.(*MockService) - assert.True(t, ok) - assert.Equal(t, "test", mockSvc.GetName()) -} - -func TestCore_WithName_Bad(t *testing.T) { - factory := func(c *Core) (any, error) { - return nil, assert.AnError - } - _, err := New(WithName("my-service", factory)) - assert.Error(t, err) - assert.ErrorIs(t, err, assert.AnError) -} diff --git a/pkg/framework/core/docs/site/404.html b/pkg/framework/core/docs/site/404.html deleted file mode 100644 index e0fae56..0000000 --- a/pkg/framework/core/docs/site/404.html +++ /dev/null @@ -1,707 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - Core.Help - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- -
-
- -
- - - - -
- - -
- -
- - - - - - - - - -
-
- - - -
-
-
- - - - - - - -
-
-
- - - -
-
-
- - - -
-
-
- - - -
- - - - - - - - - -
- -

404 - Not found

- -
-
- - - - - -
- - - -
- -
- - -
- -
-
-
-
- - - - - - - - - \ No newline at end of file diff --git a/pkg/framework/core/docs/site/assets/external/fonts.googleapis.com/css.49ea35f2.css b/pkg/framework/core/docs/site/assets/external/fonts.googleapis.com/css.49ea35f2.css deleted file mode 100644 index d5c0c14..0000000 --- a/pkg/framework/core/docs/site/assets/external/fonts.googleapis.com/css.49ea35f2.css +++ /dev/null @@ -1,756 +0,0 @@ -/* cyrillic-ext */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkC3kaWzU.woff2) format('woff2'); - unicode-range: U+0460-052F, U+1C80-1C8A, U+20B4, U+2DE0-2DFF, U+A640-A69F, U+FE2E-FE2F; -} -/* cyrillic */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkAnkaWzU.woff2) format('woff2'); - unicode-range: U+0301, U+0400-045F, U+0490-0491, U+04B0-04B1, U+2116; -} -/* greek-ext */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCnkaWzU.woff2) format('woff2'); - unicode-range: U+1F00-1FFF; -} -/* greek */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkBXkaWzU.woff2) format('woff2'); - unicode-range: U+0370-0377, U+037A-037F, U+0384-038A, U+038C, U+038E-03A1, U+03A3-03FF; -} -/* math */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkenkaWzU.woff2) format('woff2'); - unicode-range: 
U+0302-0303, U+0305, U+0307-0308, U+0310, U+0312, U+0315, U+031A, U+0326-0327, U+032C, U+032F-0330, U+0332-0333, U+0338, U+033A, U+0346, U+034D, U+0391-03A1, U+03A3-03A9, U+03B1-03C9, U+03D1, U+03D5-03D6, U+03F0-03F1, U+03F4-03F5, U+2016-2017, U+2034-2038, U+203C, U+2040, U+2043, U+2047, U+2050, U+2057, U+205F, U+2070-2071, U+2074-208E, U+2090-209C, U+20D0-20DC, U+20E1, U+20E5-20EF, U+2100-2112, U+2114-2115, U+2117-2121, U+2123-214F, U+2190, U+2192, U+2194-21AE, U+21B0-21E5, U+21F1-21F2, U+21F4-2211, U+2213-2214, U+2216-22FF, U+2308-230B, U+2310, U+2319, U+231C-2321, U+2336-237A, U+237C, U+2395, U+239B-23B7, U+23D0, U+23DC-23E1, U+2474-2475, U+25AF, U+25B3, U+25B7, U+25BD, U+25C1, U+25CA, U+25CC, U+25FB, U+266D-266F, U+27C0-27FF, U+2900-2AFF, U+2B0E-2B11, U+2B30-2B4C, U+2BFE, U+3030, U+FF5B, U+FF5D, U+1D400-1D7FF, U+1EE00-1EEFF; -} -/* symbols */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkaHkaWzU.woff2) format('woff2'); - unicode-range: U+0001-000C, U+000E-001F, U+007F-009F, U+20DD-20E0, U+20E2-20E4, U+2150-218F, U+2190, U+2192, U+2194-2199, U+21AF, U+21E6-21F0, U+21F3, U+2218-2219, U+2299, U+22C4-22C6, U+2300-243F, U+2440-244A, U+2460-24FF, U+25A0-27BF, U+2800-28FF, U+2921-2922, U+2981, U+29BF, U+29EB, U+2B00-2BFF, U+4DC0-4DFF, U+FFF9-FFFB, U+10140-1018E, U+10190-1019C, U+101A0, U+101D0-101FD, U+102E0-102FB, U+10E60-10E7E, U+1D2C0-1D2D3, U+1D2E0-1D37F, U+1F000-1F0FF, U+1F100-1F1AD, U+1F1E6-1F1FF, U+1F30D-1F30F, U+1F315, U+1F31C, U+1F31E, U+1F320-1F32C, U+1F336, U+1F378, U+1F37D, U+1F382, U+1F393-1F39F, U+1F3A7-1F3A8, U+1F3AC-1F3AF, U+1F3C2, U+1F3C4-1F3C6, U+1F3CA-1F3CE, U+1F3D4-1F3E0, U+1F3ED, U+1F3F1-1F3F3, U+1F3F5-1F3F7, U+1F408, U+1F415, U+1F41F, U+1F426, U+1F43F, U+1F441-1F442, U+1F444, U+1F446-1F449, U+1F44C-1F44E, U+1F453, U+1F46A, U+1F47D, U+1F4A3, U+1F4B0, U+1F4B3, U+1F4B9, U+1F4BB, U+1F4BF, 
U+1F4C8-1F4CB, U+1F4D6, U+1F4DA, U+1F4DF, U+1F4E3-1F4E6, U+1F4EA-1F4ED, U+1F4F7, U+1F4F9-1F4FB, U+1F4FD-1F4FE, U+1F503, U+1F507-1F50B, U+1F50D, U+1F512-1F513, U+1F53E-1F54A, U+1F54F-1F5FA, U+1F610, U+1F650-1F67F, U+1F687, U+1F68D, U+1F691, U+1F694, U+1F698, U+1F6AD, U+1F6B2, U+1F6B9-1F6BA, U+1F6BC, U+1F6C6-1F6CF, U+1F6D3-1F6D7, U+1F6E0-1F6EA, U+1F6F0-1F6F3, U+1F6F7-1F6FC, U+1F700-1F7FF, U+1F800-1F80B, U+1F810-1F847, U+1F850-1F859, U+1F860-1F887, U+1F890-1F8AD, U+1F8B0-1F8BB, U+1F8C0-1F8C1, U+1F900-1F90B, U+1F93B, U+1F946, U+1F984, U+1F996, U+1F9E9, U+1FA00-1FA6F, U+1FA70-1FA7C, U+1FA80-1FA89, U+1FA8F-1FAC6, U+1FACE-1FADC, U+1FADF-1FAE9, U+1FAF0-1FAF8, U+1FB00-1FBFF; -} -/* vietnamese */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCXkaWzU.woff2) format('woff2'); - unicode-range: U+0102-0103, U+0110-0111, U+0128-0129, U+0168-0169, U+01A0-01A1, U+01AF-01B0, U+0300-0301, U+0303-0304, U+0308-0309, U+0323, U+0329, U+1EA0-1EF9, U+20AB; -} -/* latin-ext */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCHkaWzU.woff2) format('woff2'); - unicode-range: U+0100-02BA, U+02BD-02C5, U+02C7-02CC, U+02CE-02D7, U+02DD-02FF, U+0304, U+0308, U+0329, U+1D00-1DBF, U+1E00-1E9F, U+1EF2-1EFF, U+2020, U+20A0-20AB, U+20AD-20C0, U+2113, U+2C60-2C7F, U+A720-A7FF; -} -/* latin */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkBnka.woff2) format('woff2'); - unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, 
U+2212, U+2215, U+FEFF, U+FFFD; -} -/* cyrillic-ext */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 400; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkC3kaWzU.woff2) format('woff2'); - unicode-range: U+0460-052F, U+1C80-1C8A, U+20B4, U+2DE0-2DFF, U+A640-A69F, U+FE2E-FE2F; -} -/* cyrillic */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 400; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkAnkaWzU.woff2) format('woff2'); - unicode-range: U+0301, U+0400-045F, U+0490-0491, U+04B0-04B1, U+2116; -} -/* greek-ext */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 400; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCnkaWzU.woff2) format('woff2'); - unicode-range: U+1F00-1FFF; -} -/* greek */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 400; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkBXkaWzU.woff2) format('woff2'); - unicode-range: U+0370-0377, U+037A-037F, U+0384-038A, U+038C, U+038E-03A1, U+03A3-03FF; -} -/* math */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 400; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkenkaWzU.woff2) format('woff2'); - unicode-range: U+0302-0303, U+0305, U+0307-0308, U+0310, U+0312, U+0315, U+031A, U+0326-0327, U+032C, U+032F-0330, U+0332-0333, U+0338, U+033A, U+0346, U+034D, U+0391-03A1, U+03A3-03A9, U+03B1-03C9, U+03D1, U+03D5-03D6, U+03F0-03F1, U+03F4-03F5, U+2016-2017, U+2034-2038, U+203C, U+2040, U+2043, U+2047, U+2050, U+2057, U+205F, U+2070-2071, U+2074-208E, U+2090-209C, U+20D0-20DC, 
U+20E1, U+20E5-20EF, U+2100-2112, U+2114-2115, U+2117-2121, U+2123-214F, U+2190, U+2192, U+2194-21AE, U+21B0-21E5, U+21F1-21F2, U+21F4-2211, U+2213-2214, U+2216-22FF, U+2308-230B, U+2310, U+2319, U+231C-2321, U+2336-237A, U+237C, U+2395, U+239B-23B7, U+23D0, U+23DC-23E1, U+2474-2475, U+25AF, U+25B3, U+25B7, U+25BD, U+25C1, U+25CA, U+25CC, U+25FB, U+266D-266F, U+27C0-27FF, U+2900-2AFF, U+2B0E-2B11, U+2B30-2B4C, U+2BFE, U+3030, U+FF5B, U+FF5D, U+1D400-1D7FF, U+1EE00-1EEFF; -} -/* symbols */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 400; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkaHkaWzU.woff2) format('woff2'); - unicode-range: U+0001-000C, U+000E-001F, U+007F-009F, U+20DD-20E0, U+20E2-20E4, U+2150-218F, U+2190, U+2192, U+2194-2199, U+21AF, U+21E6-21F0, U+21F3, U+2218-2219, U+2299, U+22C4-22C6, U+2300-243F, U+2440-244A, U+2460-24FF, U+25A0-27BF, U+2800-28FF, U+2921-2922, U+2981, U+29BF, U+29EB, U+2B00-2BFF, U+4DC0-4DFF, U+FFF9-FFFB, U+10140-1018E, U+10190-1019C, U+101A0, U+101D0-101FD, U+102E0-102FB, U+10E60-10E7E, U+1D2C0-1D2D3, U+1D2E0-1D37F, U+1F000-1F0FF, U+1F100-1F1AD, U+1F1E6-1F1FF, U+1F30D-1F30F, U+1F315, U+1F31C, U+1F31E, U+1F320-1F32C, U+1F336, U+1F378, U+1F37D, U+1F382, U+1F393-1F39F, U+1F3A7-1F3A8, U+1F3AC-1F3AF, U+1F3C2, U+1F3C4-1F3C6, U+1F3CA-1F3CE, U+1F3D4-1F3E0, U+1F3ED, U+1F3F1-1F3F3, U+1F3F5-1F3F7, U+1F408, U+1F415, U+1F41F, U+1F426, U+1F43F, U+1F441-1F442, U+1F444, U+1F446-1F449, U+1F44C-1F44E, U+1F453, U+1F46A, U+1F47D, U+1F4A3, U+1F4B0, U+1F4B3, U+1F4B9, U+1F4BB, U+1F4BF, U+1F4C8-1F4CB, U+1F4D6, U+1F4DA, U+1F4DF, U+1F4E3-1F4E6, U+1F4EA-1F4ED, U+1F4F7, U+1F4F9-1F4FB, U+1F4FD-1F4FE, U+1F503, U+1F507-1F50B, U+1F50D, U+1F512-1F513, U+1F53E-1F54A, U+1F54F-1F5FA, U+1F610, U+1F650-1F67F, U+1F687, U+1F68D, U+1F691, U+1F694, U+1F698, U+1F6AD, U+1F6B2, U+1F6B9-1F6BA, U+1F6BC, U+1F6C6-1F6CF, U+1F6D3-1F6D7, U+1F6E0-1F6EA, U+1F6F0-1F6F3, 
U+1F6F7-1F6FC, U+1F700-1F7FF, U+1F800-1F80B, U+1F810-1F847, U+1F850-1F859, U+1F860-1F887, U+1F890-1F8AD, U+1F8B0-1F8BB, U+1F8C0-1F8C1, U+1F900-1F90B, U+1F93B, U+1F946, U+1F984, U+1F996, U+1F9E9, U+1FA00-1FA6F, U+1FA70-1FA7C, U+1FA80-1FA89, U+1FA8F-1FAC6, U+1FACE-1FADC, U+1FADF-1FAE9, U+1FAF0-1FAF8, U+1FB00-1FBFF; -} -/* vietnamese */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 400; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCXkaWzU.woff2) format('woff2'); - unicode-range: U+0102-0103, U+0110-0111, U+0128-0129, U+0168-0169, U+01A0-01A1, U+01AF-01B0, U+0300-0301, U+0303-0304, U+0308-0309, U+0323, U+0329, U+1EA0-1EF9, U+20AB; -} -/* latin-ext */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 400; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCHkaWzU.woff2) format('woff2'); - unicode-range: U+0100-02BA, U+02BD-02C5, U+02C7-02CC, U+02CE-02D7, U+02DD-02FF, U+0304, U+0308, U+0329, U+1D00-1DBF, U+1E00-1E9F, U+1EF2-1EFF, U+2020, U+20A0-20AB, U+20AD-20C0, U+2113, U+2C60-2C7F, U+A720-A7FF; -} -/* latin */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 400; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkBnka.woff2) format('woff2'); - unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD; -} -/* cyrillic-ext */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkC3kaWzU.woff2) format('woff2'); - unicode-range: U+0460-052F, U+1C80-1C8A, U+20B4, U+2DE0-2DFF, 
U+A640-A69F, U+FE2E-FE2F; -} -/* cyrillic */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkAnkaWzU.woff2) format('woff2'); - unicode-range: U+0301, U+0400-045F, U+0490-0491, U+04B0-04B1, U+2116; -} -/* greek-ext */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCnkaWzU.woff2) format('woff2'); - unicode-range: U+1F00-1FFF; -} -/* greek */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkBXkaWzU.woff2) format('woff2'); - unicode-range: U+0370-0377, U+037A-037F, U+0384-038A, U+038C, U+038E-03A1, U+03A3-03FF; -} -/* math */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkenkaWzU.woff2) format('woff2'); - unicode-range: U+0302-0303, U+0305, U+0307-0308, U+0310, U+0312, U+0315, U+031A, U+0326-0327, U+032C, U+032F-0330, U+0332-0333, U+0338, U+033A, U+0346, U+034D, U+0391-03A1, U+03A3-03A9, U+03B1-03C9, U+03D1, U+03D5-03D6, U+03F0-03F1, U+03F4-03F5, U+2016-2017, U+2034-2038, U+203C, U+2040, U+2043, U+2047, U+2050, U+2057, U+205F, U+2070-2071, U+2074-208E, U+2090-209C, U+20D0-20DC, U+20E1, U+20E5-20EF, U+2100-2112, U+2114-2115, U+2117-2121, U+2123-214F, U+2190, U+2192, U+2194-21AE, U+21B0-21E5, U+21F1-21F2, U+21F4-2211, U+2213-2214, U+2216-22FF, U+2308-230B, U+2310, U+2319, U+231C-2321, U+2336-237A, U+237C, U+2395, U+239B-23B7, U+23D0, U+23DC-23E1, U+2474-2475, U+25AF, U+25B3, U+25B7, U+25BD, U+25C1, U+25CA, U+25CC, U+25FB, U+266D-266F, 
U+27C0-27FF, U+2900-2AFF, U+2B0E-2B11, U+2B30-2B4C, U+2BFE, U+3030, U+FF5B, U+FF5D, U+1D400-1D7FF, U+1EE00-1EEFF; -} -/* symbols */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkaHkaWzU.woff2) format('woff2'); - unicode-range: U+0001-000C, U+000E-001F, U+007F-009F, U+20DD-20E0, U+20E2-20E4, U+2150-218F, U+2190, U+2192, U+2194-2199, U+21AF, U+21E6-21F0, U+21F3, U+2218-2219, U+2299, U+22C4-22C6, U+2300-243F, U+2440-244A, U+2460-24FF, U+25A0-27BF, U+2800-28FF, U+2921-2922, U+2981, U+29BF, U+29EB, U+2B00-2BFF, U+4DC0-4DFF, U+FFF9-FFFB, U+10140-1018E, U+10190-1019C, U+101A0, U+101D0-101FD, U+102E0-102FB, U+10E60-10E7E, U+1D2C0-1D2D3, U+1D2E0-1D37F, U+1F000-1F0FF, U+1F100-1F1AD, U+1F1E6-1F1FF, U+1F30D-1F30F, U+1F315, U+1F31C, U+1F31E, U+1F320-1F32C, U+1F336, U+1F378, U+1F37D, U+1F382, U+1F393-1F39F, U+1F3A7-1F3A8, U+1F3AC-1F3AF, U+1F3C2, U+1F3C4-1F3C6, U+1F3CA-1F3CE, U+1F3D4-1F3E0, U+1F3ED, U+1F3F1-1F3F3, U+1F3F5-1F3F7, U+1F408, U+1F415, U+1F41F, U+1F426, U+1F43F, U+1F441-1F442, U+1F444, U+1F446-1F449, U+1F44C-1F44E, U+1F453, U+1F46A, U+1F47D, U+1F4A3, U+1F4B0, U+1F4B3, U+1F4B9, U+1F4BB, U+1F4BF, U+1F4C8-1F4CB, U+1F4D6, U+1F4DA, U+1F4DF, U+1F4E3-1F4E6, U+1F4EA-1F4ED, U+1F4F7, U+1F4F9-1F4FB, U+1F4FD-1F4FE, U+1F503, U+1F507-1F50B, U+1F50D, U+1F512-1F513, U+1F53E-1F54A, U+1F54F-1F5FA, U+1F610, U+1F650-1F67F, U+1F687, U+1F68D, U+1F691, U+1F694, U+1F698, U+1F6AD, U+1F6B2, U+1F6B9-1F6BA, U+1F6BC, U+1F6C6-1F6CF, U+1F6D3-1F6D7, U+1F6E0-1F6EA, U+1F6F0-1F6F3, U+1F6F7-1F6FC, U+1F700-1F7FF, U+1F800-1F80B, U+1F810-1F847, U+1F850-1F859, U+1F860-1F887, U+1F890-1F8AD, U+1F8B0-1F8BB, U+1F8C0-1F8C1, U+1F900-1F90B, U+1F93B, U+1F946, U+1F984, U+1F996, U+1F9E9, U+1FA00-1FA6F, U+1FA70-1FA7C, U+1FA80-1FA89, U+1FA8F-1FAC6, U+1FACE-1FADC, U+1FADF-1FAE9, U+1FAF0-1FAF8, U+1FB00-1FBFF; -} -/* vietnamese */ -@font-face { - font-family: 
'Roboto'; - font-style: italic; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCXkaWzU.woff2) format('woff2'); - unicode-range: U+0102-0103, U+0110-0111, U+0128-0129, U+0168-0169, U+01A0-01A1, U+01AF-01B0, U+0300-0301, U+0303-0304, U+0308-0309, U+0323, U+0329, U+1EA0-1EF9, U+20AB; -} -/* latin-ext */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCHkaWzU.woff2) format('woff2'); - unicode-range: U+0100-02BA, U+02BD-02C5, U+02C7-02CC, U+02CE-02D7, U+02DD-02FF, U+0304, U+0308, U+0329, U+1D00-1DBF, U+1E00-1E9F, U+1EF2-1EFF, U+2020, U+20A0-20AB, U+20AD-20C0, U+2113, U+2C60-2C7F, U+A720-A7FF; -} -/* latin */ -@font-face { - font-family: 'Roboto'; - font-style: italic; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkBnka.woff2) format('woff2'); - unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD; -} -/* cyrillic-ext */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3GUBGEe.woff2) format('woff2'); - unicode-range: U+0460-052F, U+1C80-1C8A, U+20B4, U+2DE0-2DFF, U+A640-A69F, U+FE2E-FE2F; -} -/* cyrillic */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3iUBGEe.woff2) format('woff2'); - unicode-range: U+0301, U+0400-045F, U+0490-0491, U+04B0-04B1, U+2116; -} -/* 
greek-ext */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3CUBGEe.woff2) format('woff2'); - unicode-range: U+1F00-1FFF; -} -/* greek */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3-UBGEe.woff2) format('woff2'); - unicode-range: U+0370-0377, U+037A-037F, U+0384-038A, U+038C, U+038E-03A1, U+03A3-03FF; -} -/* math */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMawCUBGEe.woff2) format('woff2'); - unicode-range: U+0302-0303, U+0305, U+0307-0308, U+0310, U+0312, U+0315, U+031A, U+0326-0327, U+032C, U+032F-0330, U+0332-0333, U+0338, U+033A, U+0346, U+034D, U+0391-03A1, U+03A3-03A9, U+03B1-03C9, U+03D1, U+03D5-03D6, U+03F0-03F1, U+03F4-03F5, U+2016-2017, U+2034-2038, U+203C, U+2040, U+2043, U+2047, U+2050, U+2057, U+205F, U+2070-2071, U+2074-208E, U+2090-209C, U+20D0-20DC, U+20E1, U+20E5-20EF, U+2100-2112, U+2114-2115, U+2117-2121, U+2123-214F, U+2190, U+2192, U+2194-21AE, U+21B0-21E5, U+21F1-21F2, U+21F4-2211, U+2213-2214, U+2216-22FF, U+2308-230B, U+2310, U+2319, U+231C-2321, U+2336-237A, U+237C, U+2395, U+239B-23B7, U+23D0, U+23DC-23E1, U+2474-2475, U+25AF, U+25B3, U+25B7, U+25BD, U+25C1, U+25CA, U+25CC, U+25FB, U+266D-266F, U+27C0-27FF, U+2900-2AFF, U+2B0E-2B11, U+2B30-2B4C, U+2BFE, U+3030, U+FF5B, U+FF5D, U+1D400-1D7FF, U+1EE00-1EEFF; -} -/* symbols */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMaxKUBGEe.woff2) format('woff2'); - 
unicode-range: U+0001-000C, U+000E-001F, U+007F-009F, U+20DD-20E0, U+20E2-20E4, U+2150-218F, U+2190, U+2192, U+2194-2199, U+21AF, U+21E6-21F0, U+21F3, U+2218-2219, U+2299, U+22C4-22C6, U+2300-243F, U+2440-244A, U+2460-24FF, U+25A0-27BF, U+2800-28FF, U+2921-2922, U+2981, U+29BF, U+29EB, U+2B00-2BFF, U+4DC0-4DFF, U+FFF9-FFFB, U+10140-1018E, U+10190-1019C, U+101A0, U+101D0-101FD, U+102E0-102FB, U+10E60-10E7E, U+1D2C0-1D2D3, U+1D2E0-1D37F, U+1F000-1F0FF, U+1F100-1F1AD, U+1F1E6-1F1FF, U+1F30D-1F30F, U+1F315, U+1F31C, U+1F31E, U+1F320-1F32C, U+1F336, U+1F378, U+1F37D, U+1F382, U+1F393-1F39F, U+1F3A7-1F3A8, U+1F3AC-1F3AF, U+1F3C2, U+1F3C4-1F3C6, U+1F3CA-1F3CE, U+1F3D4-1F3E0, U+1F3ED, U+1F3F1-1F3F3, U+1F3F5-1F3F7, U+1F408, U+1F415, U+1F41F, U+1F426, U+1F43F, U+1F441-1F442, U+1F444, U+1F446-1F449, U+1F44C-1F44E, U+1F453, U+1F46A, U+1F47D, U+1F4A3, U+1F4B0, U+1F4B3, U+1F4B9, U+1F4BB, U+1F4BF, U+1F4C8-1F4CB, U+1F4D6, U+1F4DA, U+1F4DF, U+1F4E3-1F4E6, U+1F4EA-1F4ED, U+1F4F7, U+1F4F9-1F4FB, U+1F4FD-1F4FE, U+1F503, U+1F507-1F50B, U+1F50D, U+1F512-1F513, U+1F53E-1F54A, U+1F54F-1F5FA, U+1F610, U+1F650-1F67F, U+1F687, U+1F68D, U+1F691, U+1F694, U+1F698, U+1F6AD, U+1F6B2, U+1F6B9-1F6BA, U+1F6BC, U+1F6C6-1F6CF, U+1F6D3-1F6D7, U+1F6E0-1F6EA, U+1F6F0-1F6F3, U+1F6F7-1F6FC, U+1F700-1F7FF, U+1F800-1F80B, U+1F810-1F847, U+1F850-1F859, U+1F860-1F887, U+1F890-1F8AD, U+1F8B0-1F8BB, U+1F8C0-1F8C1, U+1F900-1F90B, U+1F93B, U+1F946, U+1F984, U+1F996, U+1F9E9, U+1FA00-1FA6F, U+1FA70-1FA7C, U+1FA80-1FA89, U+1FA8F-1FAC6, U+1FACE-1FADC, U+1FADF-1FAE9, U+1FAF0-1FAF8, U+1FB00-1FBFF; -} -/* vietnamese */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3OUBGEe.woff2) format('woff2'); - unicode-range: U+0102-0103, U+0110-0111, U+0128-0129, U+0168-0169, U+01A0-01A1, U+01AF-01B0, U+0300-0301, U+0303-0304, U+0308-0309, U+0323, U+0329, U+1EA0-1EF9, 
U+20AB; -} -/* latin-ext */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3KUBGEe.woff2) format('woff2'); - unicode-range: U+0100-02BA, U+02BD-02C5, U+02C7-02CC, U+02CE-02D7, U+02DD-02FF, U+0304, U+0308, U+0329, U+1D00-1DBF, U+1E00-1E9F, U+1EF2-1EFF, U+2020, U+20A0-20AB, U+20AD-20C0, U+2113, U+2C60-2C7F, U+A720-A7FF; -} -/* latin */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 300; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3yUBA.woff2) format('woff2'); - unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD; -} -/* cyrillic-ext */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 400; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3GUBGEe.woff2) format('woff2'); - unicode-range: U+0460-052F, U+1C80-1C8A, U+20B4, U+2DE0-2DFF, U+A640-A69F, U+FE2E-FE2F; -} -/* cyrillic */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 400; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3iUBGEe.woff2) format('woff2'); - unicode-range: U+0301, U+0400-045F, U+0490-0491, U+04B0-04B1, U+2116; -} -/* greek-ext */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 400; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3CUBGEe.woff2) format('woff2'); - unicode-range: U+1F00-1FFF; -} -/* greek */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 400; - 
font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3-UBGEe.woff2) format('woff2'); - unicode-range: U+0370-0377, U+037A-037F, U+0384-038A, U+038C, U+038E-03A1, U+03A3-03FF; -} -/* math */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 400; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMawCUBGEe.woff2) format('woff2'); - unicode-range: U+0302-0303, U+0305, U+0307-0308, U+0310, U+0312, U+0315, U+031A, U+0326-0327, U+032C, U+032F-0330, U+0332-0333, U+0338, U+033A, U+0346, U+034D, U+0391-03A1, U+03A3-03A9, U+03B1-03C9, U+03D1, U+03D5-03D6, U+03F0-03F1, U+03F4-03F5, U+2016-2017, U+2034-2038, U+203C, U+2040, U+2043, U+2047, U+2050, U+2057, U+205F, U+2070-2071, U+2074-208E, U+2090-209C, U+20D0-20DC, U+20E1, U+20E5-20EF, U+2100-2112, U+2114-2115, U+2117-2121, U+2123-214F, U+2190, U+2192, U+2194-21AE, U+21B0-21E5, U+21F1-21F2, U+21F4-2211, U+2213-2214, U+2216-22FF, U+2308-230B, U+2310, U+2319, U+231C-2321, U+2336-237A, U+237C, U+2395, U+239B-23B7, U+23D0, U+23DC-23E1, U+2474-2475, U+25AF, U+25B3, U+25B7, U+25BD, U+25C1, U+25CA, U+25CC, U+25FB, U+266D-266F, U+27C0-27FF, U+2900-2AFF, U+2B0E-2B11, U+2B30-2B4C, U+2BFE, U+3030, U+FF5B, U+FF5D, U+1D400-1D7FF, U+1EE00-1EEFF; -} -/* symbols */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 400; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMaxKUBGEe.woff2) format('woff2'); - unicode-range: U+0001-000C, U+000E-001F, U+007F-009F, U+20DD-20E0, U+20E2-20E4, U+2150-218F, U+2190, U+2192, U+2194-2199, U+21AF, U+21E6-21F0, U+21F3, U+2218-2219, U+2299, U+22C4-22C6, U+2300-243F, U+2440-244A, U+2460-24FF, U+25A0-27BF, U+2800-28FF, U+2921-2922, U+2981, U+29BF, U+29EB, U+2B00-2BFF, U+4DC0-4DFF, U+FFF9-FFFB, U+10140-1018E, U+10190-1019C, U+101A0, U+101D0-101FD, 
U+102E0-102FB, U+10E60-10E7E, U+1D2C0-1D2D3, U+1D2E0-1D37F, U+1F000-1F0FF, U+1F100-1F1AD, U+1F1E6-1F1FF, U+1F30D-1F30F, U+1F315, U+1F31C, U+1F31E, U+1F320-1F32C, U+1F336, U+1F378, U+1F37D, U+1F382, U+1F393-1F39F, U+1F3A7-1F3A8, U+1F3AC-1F3AF, U+1F3C2, U+1F3C4-1F3C6, U+1F3CA-1F3CE, U+1F3D4-1F3E0, U+1F3ED, U+1F3F1-1F3F3, U+1F3F5-1F3F7, U+1F408, U+1F415, U+1F41F, U+1F426, U+1F43F, U+1F441-1F442, U+1F444, U+1F446-1F449, U+1F44C-1F44E, U+1F453, U+1F46A, U+1F47D, U+1F4A3, U+1F4B0, U+1F4B3, U+1F4B9, U+1F4BB, U+1F4BF, U+1F4C8-1F4CB, U+1F4D6, U+1F4DA, U+1F4DF, U+1F4E3-1F4E6, U+1F4EA-1F4ED, U+1F4F7, U+1F4F9-1F4FB, U+1F4FD-1F4FE, U+1F503, U+1F507-1F50B, U+1F50D, U+1F512-1F513, U+1F53E-1F54A, U+1F54F-1F5FA, U+1F610, U+1F650-1F67F, U+1F687, U+1F68D, U+1F691, U+1F694, U+1F698, U+1F6AD, U+1F6B2, U+1F6B9-1F6BA, U+1F6BC, U+1F6C6-1F6CF, U+1F6D3-1F6D7, U+1F6E0-1F6EA, U+1F6F0-1F6F3, U+1F6F7-1F6FC, U+1F700-1F7FF, U+1F800-1F80B, U+1F810-1F847, U+1F850-1F859, U+1F860-1F887, U+1F890-1F8AD, U+1F8B0-1F8BB, U+1F8C0-1F8C1, U+1F900-1F90B, U+1F93B, U+1F946, U+1F984, U+1F996, U+1F9E9, U+1FA00-1FA6F, U+1FA70-1FA7C, U+1FA80-1FA89, U+1FA8F-1FAC6, U+1FACE-1FADC, U+1FADF-1FAE9, U+1FAF0-1FAF8, U+1FB00-1FBFF; -} -/* vietnamese */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 400; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3OUBGEe.woff2) format('woff2'); - unicode-range: U+0102-0103, U+0110-0111, U+0128-0129, U+0168-0169, U+01A0-01A1, U+01AF-01B0, U+0300-0301, U+0303-0304, U+0308-0309, U+0323, U+0329, U+1EA0-1EF9, U+20AB; -} -/* latin-ext */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 400; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3KUBGEe.woff2) format('woff2'); - unicode-range: U+0100-02BA, U+02BD-02C5, U+02C7-02CC, U+02CE-02D7, U+02DD-02FF, U+0304, U+0308, U+0329, U+1D00-1DBF, 
U+1E00-1E9F, U+1EF2-1EFF, U+2020, U+20A0-20AB, U+20AD-20C0, U+2113, U+2C60-2C7F, U+A720-A7FF; -} -/* latin */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 400; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3yUBA.woff2) format('woff2'); - unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD; -} -/* cyrillic-ext */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3GUBGEe.woff2) format('woff2'); - unicode-range: U+0460-052F, U+1C80-1C8A, U+20B4, U+2DE0-2DFF, U+A640-A69F, U+FE2E-FE2F; -} -/* cyrillic */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3iUBGEe.woff2) format('woff2'); - unicode-range: U+0301, U+0400-045F, U+0490-0491, U+04B0-04B1, U+2116; -} -/* greek-ext */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3CUBGEe.woff2) format('woff2'); - unicode-range: U+1F00-1FFF; -} -/* greek */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3-UBGEe.woff2) format('woff2'); - unicode-range: U+0370-0377, U+037A-037F, U+0384-038A, U+038C, U+038E-03A1, U+03A3-03FF; -} -/* math */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - 
src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMawCUBGEe.woff2) format('woff2'); - unicode-range: U+0302-0303, U+0305, U+0307-0308, U+0310, U+0312, U+0315, U+031A, U+0326-0327, U+032C, U+032F-0330, U+0332-0333, U+0338, U+033A, U+0346, U+034D, U+0391-03A1, U+03A3-03A9, U+03B1-03C9, U+03D1, U+03D5-03D6, U+03F0-03F1, U+03F4-03F5, U+2016-2017, U+2034-2038, U+203C, U+2040, U+2043, U+2047, U+2050, U+2057, U+205F, U+2070-2071, U+2074-208E, U+2090-209C, U+20D0-20DC, U+20E1, U+20E5-20EF, U+2100-2112, U+2114-2115, U+2117-2121, U+2123-214F, U+2190, U+2192, U+2194-21AE, U+21B0-21E5, U+21F1-21F2, U+21F4-2211, U+2213-2214, U+2216-22FF, U+2308-230B, U+2310, U+2319, U+231C-2321, U+2336-237A, U+237C, U+2395, U+239B-23B7, U+23D0, U+23DC-23E1, U+2474-2475, U+25AF, U+25B3, U+25B7, U+25BD, U+25C1, U+25CA, U+25CC, U+25FB, U+266D-266F, U+27C0-27FF, U+2900-2AFF, U+2B0E-2B11, U+2B30-2B4C, U+2BFE, U+3030, U+FF5B, U+FF5D, U+1D400-1D7FF, U+1EE00-1EEFF; -} -/* symbols */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMaxKUBGEe.woff2) format('woff2'); - unicode-range: U+0001-000C, U+000E-001F, U+007F-009F, U+20DD-20E0, U+20E2-20E4, U+2150-218F, U+2190, U+2192, U+2194-2199, U+21AF, U+21E6-21F0, U+21F3, U+2218-2219, U+2299, U+22C4-22C6, U+2300-243F, U+2440-244A, U+2460-24FF, U+25A0-27BF, U+2800-28FF, U+2921-2922, U+2981, U+29BF, U+29EB, U+2B00-2BFF, U+4DC0-4DFF, U+FFF9-FFFB, U+10140-1018E, U+10190-1019C, U+101A0, U+101D0-101FD, U+102E0-102FB, U+10E60-10E7E, U+1D2C0-1D2D3, U+1D2E0-1D37F, U+1F000-1F0FF, U+1F100-1F1AD, U+1F1E6-1F1FF, U+1F30D-1F30F, U+1F315, U+1F31C, U+1F31E, U+1F320-1F32C, U+1F336, U+1F378, U+1F37D, U+1F382, U+1F393-1F39F, U+1F3A7-1F3A8, U+1F3AC-1F3AF, U+1F3C2, U+1F3C4-1F3C6, U+1F3CA-1F3CE, U+1F3D4-1F3E0, U+1F3ED, U+1F3F1-1F3F3, U+1F3F5-1F3F7, U+1F408, U+1F415, U+1F41F, U+1F426, U+1F43F, 
U+1F441-1F442, U+1F444, U+1F446-1F449, U+1F44C-1F44E, U+1F453, U+1F46A, U+1F47D, U+1F4A3, U+1F4B0, U+1F4B3, U+1F4B9, U+1F4BB, U+1F4BF, U+1F4C8-1F4CB, U+1F4D6, U+1F4DA, U+1F4DF, U+1F4E3-1F4E6, U+1F4EA-1F4ED, U+1F4F7, U+1F4F9-1F4FB, U+1F4FD-1F4FE, U+1F503, U+1F507-1F50B, U+1F50D, U+1F512-1F513, U+1F53E-1F54A, U+1F54F-1F5FA, U+1F610, U+1F650-1F67F, U+1F687, U+1F68D, U+1F691, U+1F694, U+1F698, U+1F6AD, U+1F6B2, U+1F6B9-1F6BA, U+1F6BC, U+1F6C6-1F6CF, U+1F6D3-1F6D7, U+1F6E0-1F6EA, U+1F6F0-1F6F3, U+1F6F7-1F6FC, U+1F700-1F7FF, U+1F800-1F80B, U+1F810-1F847, U+1F850-1F859, U+1F860-1F887, U+1F890-1F8AD, U+1F8B0-1F8BB, U+1F8C0-1F8C1, U+1F900-1F90B, U+1F93B, U+1F946, U+1F984, U+1F996, U+1F9E9, U+1FA00-1FA6F, U+1FA70-1FA7C, U+1FA80-1FA89, U+1FA8F-1FAC6, U+1FACE-1FADC, U+1FADF-1FAE9, U+1FAF0-1FAF8, U+1FB00-1FBFF; -} -/* vietnamese */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3OUBGEe.woff2) format('woff2'); - unicode-range: U+0102-0103, U+0110-0111, U+0128-0129, U+0168-0169, U+01A0-01A1, U+01AF-01B0, U+0300-0301, U+0303-0304, U+0308-0309, U+0323, U+0329, U+1EA0-1EF9, U+20AB; -} -/* latin-ext */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3KUBGEe.woff2) format('woff2'); - unicode-range: U+0100-02BA, U+02BD-02C5, U+02C7-02CC, U+02CE-02D7, U+02DD-02FF, U+0304, U+0308, U+0329, U+1D00-1DBF, U+1E00-1E9F, U+1EF2-1EFF, U+2020, U+20A0-20AB, U+20AD-20C0, U+2113, U+2C60-2C7F, U+A720-A7FF; -} -/* latin */ -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 700; - font-stretch: 100%; - font-display: fallback; - src: url(../fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3yUBA.woff2) format('woff2'); - unicode-range: U+0000-00FF, 
U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD; -} -/* cyrillic-ext */ -@font-face { - font-family: 'Roboto Mono'; - font-style: italic; - font-weight: 400; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3CWWoKC.woff2) format('woff2'); - unicode-range: U+0460-052F, U+1C80-1C8A, U+20B4, U+2DE0-2DFF, U+A640-A69F, U+FE2E-FE2F; -} -/* cyrillic */ -@font-face { - font-family: 'Roboto Mono'; - font-style: italic; - font-weight: 400; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3mWWoKC.woff2) format('woff2'); - unicode-range: U+0301, U+0400-045F, U+0490-0491, U+04B0-04B1, U+2116; -} -/* greek */ -@font-face { - font-family: 'Roboto Mono'; - font-style: italic; - font-weight: 400; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm36WWoKC.woff2) format('woff2'); - unicode-range: U+0370-0377, U+037A-037F, U+0384-038A, U+038C, U+038E-03A1, U+03A3-03FF; -} -/* vietnamese */ -@font-face { - font-family: 'Roboto Mono'; - font-style: italic; - font-weight: 400; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3KWWoKC.woff2) format('woff2'); - unicode-range: U+0102-0103, U+0110-0111, U+0128-0129, U+0168-0169, U+01A0-01A1, U+01AF-01B0, U+0300-0301, U+0303-0304, U+0308-0309, U+0323, U+0329, U+1EA0-1EF9, U+20AB; -} -/* latin-ext */ -@font-face { - font-family: 'Roboto Mono'; - font-style: italic; - font-weight: 400; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3OWWoKC.woff2) format('woff2'); - unicode-range: U+0100-02BA, U+02BD-02C5, U+02C7-02CC, U+02CE-02D7, U+02DD-02FF, U+0304, U+0308, U+0329, U+1D00-1DBF, U+1E00-1E9F, U+1EF2-1EFF, U+2020, U+20A0-20AB, U+20AD-20C0, U+2113, U+2C60-2C7F, U+A720-A7FF; -} -/* latin */ 
-@font-face { - font-family: 'Roboto Mono'; - font-style: italic; - font-weight: 400; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm32WWg.woff2) format('woff2'); - unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD; -} -/* cyrillic-ext */ -@font-face { - font-family: 'Roboto Mono'; - font-style: italic; - font-weight: 700; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3CWWoKC.woff2) format('woff2'); - unicode-range: U+0460-052F, U+1C80-1C8A, U+20B4, U+2DE0-2DFF, U+A640-A69F, U+FE2E-FE2F; -} -/* cyrillic */ -@font-face { - font-family: 'Roboto Mono'; - font-style: italic; - font-weight: 700; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3mWWoKC.woff2) format('woff2'); - unicode-range: U+0301, U+0400-045F, U+0490-0491, U+04B0-04B1, U+2116; -} -/* greek */ -@font-face { - font-family: 'Roboto Mono'; - font-style: italic; - font-weight: 700; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm36WWoKC.woff2) format('woff2'); - unicode-range: U+0370-0377, U+037A-037F, U+0384-038A, U+038C, U+038E-03A1, U+03A3-03FF; -} -/* vietnamese */ -@font-face { - font-family: 'Roboto Mono'; - font-style: italic; - font-weight: 700; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3KWWoKC.woff2) format('woff2'); - unicode-range: U+0102-0103, U+0110-0111, U+0128-0129, U+0168-0169, U+01A0-01A1, U+01AF-01B0, U+0300-0301, U+0303-0304, U+0308-0309, U+0323, U+0329, U+1EA0-1EF9, U+20AB; -} -/* latin-ext */ -@font-face { - font-family: 'Roboto Mono'; - font-style: italic; - font-weight: 700; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3OWWoKC.woff2) 
format('woff2'); - unicode-range: U+0100-02BA, U+02BD-02C5, U+02C7-02CC, U+02CE-02D7, U+02DD-02FF, U+0304, U+0308, U+0329, U+1D00-1DBF, U+1E00-1E9F, U+1EF2-1EFF, U+2020, U+20A0-20AB, U+20AD-20C0, U+2113, U+2C60-2C7F, U+A720-A7FF; -} -/* latin */ -@font-face { - font-family: 'Roboto Mono'; - font-style: italic; - font-weight: 700; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm32WWg.woff2) format('woff2'); - unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD; -} -/* cyrillic-ext */ -@font-face { - font-family: 'Roboto Mono'; - font-style: normal; - font-weight: 400; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhGq3-OXg.woff2) format('woff2'); - unicode-range: U+0460-052F, U+1C80-1C8A, U+20B4, U+2DE0-2DFF, U+A640-A69F, U+FE2E-FE2F; -} -/* cyrillic */ -@font-face { - font-family: 'Roboto Mono'; - font-style: normal; - font-weight: 400; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhPq3-OXg.woff2) format('woff2'); - unicode-range: U+0301, U+0400-045F, U+0490-0491, U+04B0-04B1, U+2116; -} -/* greek */ -@font-face { - font-family: 'Roboto Mono'; - font-style: normal; - font-weight: 400; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhIq3-OXg.woff2) format('woff2'); - unicode-range: U+0370-0377, U+037A-037F, U+0384-038A, U+038C, U+038E-03A1, U+03A3-03FF; -} -/* vietnamese */ -@font-face { - font-family: 'Roboto Mono'; - font-style: normal; - font-weight: 400; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhEq3-OXg.woff2) format('woff2'); - unicode-range: U+0102-0103, U+0110-0111, U+0128-0129, U+0168-0169, U+01A0-01A1, U+01AF-01B0, U+0300-0301, U+0303-0304, U+0308-0309, U+0323, U+0329, 
U+1EA0-1EF9, U+20AB; -} -/* latin-ext */ -@font-face { - font-family: 'Roboto Mono'; - font-style: normal; - font-weight: 400; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhFq3-OXg.woff2) format('woff2'); - unicode-range: U+0100-02BA, U+02BD-02C5, U+02C7-02CC, U+02CE-02D7, U+02DD-02FF, U+0304, U+0308, U+0329, U+1D00-1DBF, U+1E00-1E9F, U+1EF2-1EFF, U+2020, U+20A0-20AB, U+20AD-20C0, U+2113, U+2C60-2C7F, U+A720-A7FF; -} -/* latin */ -@font-face { - font-family: 'Roboto Mono'; - font-style: normal; - font-weight: 400; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhLq38.woff2) format('woff2'); - unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD; -} -/* cyrillic-ext */ -@font-face { - font-family: 'Roboto Mono'; - font-style: normal; - font-weight: 700; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhGq3-OXg.woff2) format('woff2'); - unicode-range: U+0460-052F, U+1C80-1C8A, U+20B4, U+2DE0-2DFF, U+A640-A69F, U+FE2E-FE2F; -} -/* cyrillic */ -@font-face { - font-family: 'Roboto Mono'; - font-style: normal; - font-weight: 700; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhPq3-OXg.woff2) format('woff2'); - unicode-range: U+0301, U+0400-045F, U+0490-0491, U+04B0-04B1, U+2116; -} -/* greek */ -@font-face { - font-family: 'Roboto Mono'; - font-style: normal; - font-weight: 700; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhIq3-OXg.woff2) format('woff2'); - unicode-range: U+0370-0377, U+037A-037F, U+0384-038A, U+038C, U+038E-03A1, U+03A3-03FF; -} -/* vietnamese */ -@font-face { - font-family: 'Roboto Mono'; - font-style: normal; - font-weight: 700; - font-display: fallback; - src: 
url(../fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhEq3-OXg.woff2) format('woff2'); - unicode-range: U+0102-0103, U+0110-0111, U+0128-0129, U+0168-0169, U+01A0-01A1, U+01AF-01B0, U+0300-0301, U+0303-0304, U+0308-0309, U+0323, U+0329, U+1EA0-1EF9, U+20AB; -} -/* latin-ext */ -@font-face { - font-family: 'Roboto Mono'; - font-style: normal; - font-weight: 700; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhFq3-OXg.woff2) format('woff2'); - unicode-range: U+0100-02BA, U+02BD-02C5, U+02C7-02CC, U+02CE-02D7, U+02DD-02FF, U+0304, U+0308, U+0329, U+1D00-1DBF, U+1E00-1E9F, U+1EF2-1EFF, U+2020, U+20A0-20AB, U+20AD-20C0, U+2113, U+2C60-2C7F, U+A720-A7FF; -} -/* latin */ -@font-face { - font-family: 'Roboto Mono'; - font-style: normal; - font-weight: 700; - font-display: fallback; - src: url(../fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhLq38.woff2) format('woff2'); - unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD; -} diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkAnkaWzU.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkAnkaWzU.woff2 deleted file mode 100644 index ab38fd5..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkAnkaWzU.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkBXkaWzU.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkBXkaWzU.woff2 deleted file mode 100644 index db65849..0000000 Binary files 
a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkBXkaWzU.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkBnka.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkBnka.woff2 deleted file mode 100644 index 7c9cbed..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkBnka.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkC3kaWzU.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkC3kaWzU.woff2 deleted file mode 100644 index e0aa393..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkC3kaWzU.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCHkaWzU.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCHkaWzU.woff2 deleted file mode 100644 index b677130..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCHkaWzU.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCXkaWzU.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCXkaWzU.woff2 deleted file mode 100644 index 669ba79..0000000 Binary files 
a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCXkaWzU.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCnkaWzU.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCnkaWzU.woff2 deleted file mode 100644 index 6cc1de8..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkCnkaWzU.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkaHkaWzU.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkaHkaWzU.woff2 deleted file mode 100644 index ded8a41..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkaHkaWzU.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkenkaWzU.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkenkaWzU.woff2 deleted file mode 100644 index dbac481..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO5CnqEu92Fr1Mu53ZEC9_Vu3r1gIhOszmkenkaWzU.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3-UBGEe.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3-UBGEe.woff2 deleted file mode 100644 index 8e0eec6..0000000 Binary files 
a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3-UBGEe.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3CUBGEe.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3CUBGEe.woff2 deleted file mode 100644 index 0ddf16c..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3CUBGEe.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3GUBGEe.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3GUBGEe.woff2 deleted file mode 100644 index 7bd3c2e..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3GUBGEe.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3KUBGEe.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3KUBGEe.woff2 deleted file mode 100644 index 8e43aa4..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3KUBGEe.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3OUBGEe.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3OUBGEe.woff2 deleted file mode 100644 index 2c6ba19..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3OUBGEe.woff2 
and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3iUBGEe.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3iUBGEe.woff2 deleted file mode 100644 index 2f8b493..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3iUBGEe.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3yUBA.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3yUBA.woff2 deleted file mode 100644 index 7c16c79..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMa3yUBA.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMawCUBGEe.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMawCUBGEe.woff2 deleted file mode 100644 index c2788c7..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMawCUBGEe.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMaxKUBGEe.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMaxKUBGEe.woff2 deleted file mode 100644 index 528b3bf..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/roboto/v49/KFO7CnqEu92Fr1ME7kSn66aGLdTylUAMaxKUBGEe.woff2 and /dev/null differ diff --git 
a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhEq3-OXg.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhEq3-OXg.woff2 deleted file mode 100644 index 2c06834..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhEq3-OXg.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhFq3-OXg.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhFq3-OXg.woff2 deleted file mode 100644 index 532a888..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhFq3-OXg.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhGq3-OXg.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhGq3-OXg.woff2 deleted file mode 100644 index b02e2d6..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhGq3-OXg.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhIq3-OXg.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhIq3-OXg.woff2 deleted file mode 100644 index ae2f9eb..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhIq3-OXg.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhLq38.woff2 
b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhLq38.woff2 deleted file mode 100644 index bfa169c..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhLq38.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhPq3-OXg.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhPq3-OXg.woff2 deleted file mode 100644 index 8a15f5c..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x5DF4xlVMF-BfR8bXMIjhPq3-OXg.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm32WWg.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm32WWg.woff2 deleted file mode 100644 index d1ee097..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm32WWg.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm36WWoKC.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm36WWoKC.woff2 deleted file mode 100644 index c8e6ed4..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm36WWoKC.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3CWWoKC.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3CWWoKC.woff2 deleted file mode 100644 index 1debc1b..0000000 Binary files 
a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3CWWoKC.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3KWWoKC.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3KWWoKC.woff2 deleted file mode 100644 index 43f7516..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3KWWoKC.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3OWWoKC.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3OWWoKC.woff2 deleted file mode 100644 index 227f362..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3OWWoKC.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3mWWoKC.woff2 b/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3mWWoKC.woff2 deleted file mode 100644 index 10a65a7..0000000 Binary files a/pkg/framework/core/docs/site/assets/external/fonts.gstatic.com/s/robotomono/v31/L0x7DF4xlVMF-BfR8bXMIjhOm3mWWoKC.woff2 and /dev/null differ diff --git a/pkg/framework/core/docs/site/assets/external/unpkg.com/iframe-worker/shim.js b/pkg/framework/core/docs/site/assets/external/unpkg.com/iframe-worker/shim.js deleted file mode 100644 index 5f1e232..0000000 --- a/pkg/framework/core/docs/site/assets/external/unpkg.com/iframe-worker/shim.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(()=>{function c(s,n){parent.postMessage(s,n||"*")}function d(...s){return s.reduce((n,e)=>n.then(()=>new Promise(r=>{let 
t=document.createElement("script");t.src=e,t.onload=r,document.body.appendChild(t)})),Promise.resolve())}var o=class extends EventTarget{constructor(e){super();this.url=e;this.m=e=>{e.source===this.w&&(this.dispatchEvent(new MessageEvent("message",{data:e.data})),this.onmessage&&this.onmessage(e))};this.e=(e,r,t,i,m)=>{if(r===`${this.url}`){let a=new ErrorEvent("error",{message:e,filename:r,lineno:t,colno:i,error:m});this.dispatchEvent(a),this.onerror&&this.onerror(a)}};let r=document.createElement("iframe");r.hidden=!0,document.body.appendChild(this.iframe=r),this.w.document.open(),this.w.document.write(` - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- -
- - - - -
- - -
- -
- - - - - - - - - -
-
- - - -
-
-
- - - - - - - -
-
-
- - - -
-
-
- - - -
-
-
- - - -
- - - - - - - - - -
- - - - - - - -

Core.Config

-

Short: App config and UI state persistence.

-

Overview

-

Stores and retrieves configuration, including window positions/sizes and user prefs.

-

Setup

-
package main
-
-import (
-  core "github.com/Snider/Core"
-  config "github.com/Snider/Core/config"
-)
-
-app := core.New(
-  core.WithService(config.Register),
-  core.WithServiceLock(),
-)
-
-

Use

-
    -
  • Persist UI state automatically when using Core.Display.
  • -
  • Read/write your own settings via the config API.
  • -
-

API

-
    -
  • Register(c *core.Core) error
  • -
  • Get(path string, out any) error
  • -
  • Set(path string, v any) error
  • -
- - - - - - - - - - -
-
- - - - - -
- - - -
- -
- - -
- -
-
-
-
- - - - - - - - - \ No newline at end of file diff --git a/pkg/framework/core/docs/site/core/crypt.html b/pkg/framework/core/docs/site/core/crypt.html deleted file mode 100644 index 7c4fbfe..0000000 --- a/pkg/framework/core/docs/site/core/crypt.html +++ /dev/null @@ -1,934 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - Core.Crypt - Core.Help - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- -
- - - - -
- - -
- -
- - - - - - - - - -
-
- - - -
-
-
- - - - - - - -
-
-
- - - -
-
-
- - - -
-
-
- - - -
- - - - - - - - - -
- - - - - - - -

Core.Crypt

-

Short: Keys, encrypt/decrypt, sign/verify.

-

Overview

-

Simple wrappers around OpenPGP for common crypto tasks.

-

Setup

-
import (
-  core "github.com/Snider/Core"
-  crypt "github.com/Snider/Core/crypt"
-)
-
-app := core.New(
-  core.WithService(crypt.Register),
-  core.WithServiceLock(),
-)
-
-

Use

-
    -
  • Generate keys
  • -
  • Encrypt/decrypt data
  • -
  • Sign/verify messages
  • -
-

API

-
    -
  • Register(c *core.Core) error
  • -
  • GenerateKey(opts ...Option) (*Key, error)
  • -
  • Encrypt(pub *Key, data []byte) ([]byte, error)
  • -
  • Decrypt(priv *Key, data []byte) ([]byte, error)
  • -
  • Sign(priv *Key, data []byte) ([]byte, error)
  • -
  • Verify(pub *Key, data, sig []byte) error
  • -
-

Notes

- - - - - - - - - - - -
-
- - - - - -
- - - -
- -
- - -
- -
-
-
-
- - - - - - - - - \ No newline at end of file diff --git a/pkg/framework/core/docs/site/core/display.html b/pkg/framework/core/docs/site/core/display.html deleted file mode 100644 index 85d104c..0000000 --- a/pkg/framework/core/docs/site/core/display.html +++ /dev/null @@ -1,936 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - Core.Display - Core.Help - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- -
- - - - -
- - -
- -
- - - - - - - - - -
-
- - - -
-
-
- - - - - - - -
-
-
- - - -
-
-
- - - -
-
-
- - - -
- - - - - - - - - -
- - - - - - - -

Core.Display

-

Short: Windows, tray, and window state.

-

Overview

-

Manages Wails windows, remembers positions/sizes, exposes JS bindings, and integrates with Core.Config for persistence.

-

Setup

-
import (
-  core "github.com/Snider/Core"
-  display "github.com/Snider/Core/display"
-)
-
-app := core.New(
-  core.WithService(display.Register),
-  core.WithServiceLock(),
-)
-
-

Use

-
    -
  • Open a window: OpenWindow(OptName("main"), ...)
  • -
  • Get a window: Window("main")
  • -
  • Save/restore state automatically when Core.Config is present
  • -
-

API

-
    -
  • Register(c *core.Core) error
  • -
  • OpenWindow(opts ...Option) *Window
  • -
  • Window(name string) *Window
  • -
  • Options: OptName, OptWidth, OptHeight, OptURL, OptTitle
  • -
-

Example

-
func (d *API) ServiceStartup(ctx context.Context, _ application.ServiceOptions) error {
-  d.OpenWindow(
-    OptName("main"), OptWidth(1280), OptHeight(900), OptURL("/"), OptTitle("Core"),
-  )
-  return nil
-}
-
- - - - - - - - - - -
-
- - - - - -
- - - -
- -
- - -
- -
-
-
-
- - - - - - - - - \ No newline at end of file diff --git a/pkg/framework/core/docs/site/core/docs.html b/pkg/framework/core/docs/site/core/docs.html deleted file mode 100644 index dc90d1f..0000000 --- a/pkg/framework/core/docs/site/core/docs.html +++ /dev/null @@ -1,932 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - Core.Docs - Core.Help - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- -
- - - - -
- - -
- -
- - - - - - - - - -
-
- - - -
-
-
- - - - - - - -
-
-
- - - -
-
-
- - - -
-
-
- - - -
- - - - - - - - - -
- - - - - - - -

Core.Docs

-

Short: In‑app help and deep‑links.

-

Overview

-

Renders MkDocs content inside your app. Opens specific sections in new windows for contextual help.

-

Setup

-
import (
-  core "github.com/Snider/Core"
-  docs "github.com/Snider/Core/docs"
-)
-
-app := core.New(
-  core.WithService(docs.Register),
-  core.WithServiceLock(),
-)
-
-

Use

-
    -
  • Open docs home in a window: docs.Open()
  • -
  • Open a section: docs.OpenAt("core/display#setup")
  • -
  • Use short, descriptive headings to create stable anchors.
  • -
-

API

-
    -
  • Register(c *core.Core) error
  • -
  • Open() — show docs home
  • -
  • OpenAt(anchor string) — open specific section
  • -
-

Notes

-
    -
  • Docs are built with MkDocs Material and included in the demo app assets.
  • -
  • You are viewing Core.Docs right now, this Website is bundled into the app binary by default.
  • -
- - - - - - - - - - -
-
- - - - - -
- - - -
- -
- - -
- -
-
-
-
- - - - - - - - - \ No newline at end of file diff --git a/pkg/framework/core/docs/site/core/index.html b/pkg/framework/core/docs/site/core/index.html deleted file mode 100644 index 38c575d..0000000 --- a/pkg/framework/core/docs/site/core/index.html +++ /dev/null @@ -1,901 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - Core - Core.Help - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- -
- - - - -
- - -
- -
- - - - - - - - - -
-
- - - -
-
-
- - - - - - - -
-
-
- - - -
-
-
- - - -
-
-
- - - -
- - - - - - - - - -
- - - - - - - -

Core

-

Short: Framework bootstrap and service container.

-

What it is

-

Core wires modules together, provides lifecycle hooks, and locks the service graph for clarity and safety.

-

Setup

-
import "github.com/Snider/Core"
-
-app := core.New(
-    core.WithServiceLock(),
-)
-
-

Use

-
    -
  • Register a module: core.RegisterModule(name, module)
  • -
  • Access a module: core.Mod[T](c, name)
  • -
  • Lock services: core.WithServiceLock()
  • -
-

API

-
    -
  • New(opts ...) *core.Core
  • -
  • RegisterModule(name string, m any) error
  • -
  • Mod[T any](c *core.Core, name ...string) *T
  • -
- - - - - - - - - - -
-
- - - - - -
- - - -
- -
- - -
- -
-
-
-
- - - - - - - - - \ No newline at end of file diff --git a/pkg/framework/core/docs/site/core/io.html b/pkg/framework/core/docs/site/core/io.html deleted file mode 100644 index 4485a50..0000000 --- a/pkg/framework/core/docs/site/core/io.html +++ /dev/null @@ -1,932 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - Core.IO - Core.Help - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- -
- - - - -
- - -
- -
- - - - - - - - - -
-
- - - -
-
-
- - - - - - - -
-
-
- - - -
-
-
- - - -
-
-
- - - -
- - - - - - - - - -
- - - - - - - -

Core.IO

-

Short: Local/remote filesystem helpers.

-

Overview

-

Abstracts filesystems (local, SFTP, WebDAV) behind a unified API for reading/writing and listing.

-

Setup

-
import (
-  core "github.com/Snider/Core"
-  ioapi "github.com/Snider/Core/filesystem"
-)
-
-app := core.New(
-  core.WithService(ioapi.Register),
-  core.WithServiceLock(),
-)
-
-

Use

-
    -
  • Open a filesystem: fs := ioapi.Local() or ioapi.SFTP(cfg)
  • -
  • Read/write files: fs.Read(path), fs.Write(path, data)
  • -
  • List directories: fs.List(path)
  • -
-

API

-
    -
  • Register(c *core.Core) error
  • -
  • Local() FS
  • -
  • SFTP(cfg Config) (FS, error)
  • -
  • WebDAV(cfg Config) (FS, error)
  • -
-

Notes

-
    -
  • See package pkg/v1/core/filesystem/* for drivers.
  • -
- - - - - - - - - - -
-
- - - - - -
- - - -
- -
- - -
- -
-
-
-
- - - - - - - - - \ No newline at end of file diff --git a/pkg/framework/core/docs/site/core/workspace.html b/pkg/framework/core/docs/site/core/workspace.html deleted file mode 100644 index 72bbc03..0000000 --- a/pkg/framework/core/docs/site/core/workspace.html +++ /dev/null @@ -1,930 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - Core.Workspace - Core.Help - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- -
- - - - -
- - -
- -
- - - - - - - - - -
-
- - - -
-
-
- - - - - - - -
-
-
- - - -
-
-
- - - -
-
-
- - - -
- - - - - - - - - -
- - - - - - - -

Core.Workspace

-

Short: Projects and paths.

-

Overview

-

Provides a consistent way to resolve app/project directories, temp/cache locations, and user data paths across platforms.

-

Setup

-
import (
-  core "github.com/Snider/Core"
-  workspace "github.com/Snider/Core/workspace"
-)
-
-app := core.New(
-  core.WithService(workspace.Register),
-  core.WithServiceLock(),
-)
-
-

Use

-
    -
  • Get app data dir: ws.DataDir()
  • -
  • Get cache dir: ws.CacheDir()
  • -
  • Resolve project path: ws.Project("my-app")
  • -
-

API

-
    -
  • Register(c *core.Core) error
  • -
  • DataDir() string
  • -
  • CacheDir() string
  • -
  • Project(name string) string
  • -
-

Notes

-
    -
  • Follows OS directory standards (AppData, ~/Library, XDG, etc.).
  • -
- - - - - - - - - - -
-
- - - - - -
- - - -
- -
- - -
- -
-
-
-
- - - - - - - - - \ No newline at end of file diff --git a/pkg/framework/core/docs/site/images/cross-platform.jpeg b/pkg/framework/core/docs/site/images/cross-platform.jpeg deleted file mode 100644 index 8de2288..0000000 Binary files a/pkg/framework/core/docs/site/images/cross-platform.jpeg and /dev/null differ diff --git a/pkg/framework/core/docs/site/images/decentralised-vpn.jpg b/pkg/framework/core/docs/site/images/decentralised-vpn.jpg deleted file mode 100644 index df1f487..0000000 Binary files a/pkg/framework/core/docs/site/images/decentralised-vpn.jpg and /dev/null differ diff --git a/pkg/framework/core/docs/site/images/favicon.ico b/pkg/framework/core/docs/site/images/favicon.ico deleted file mode 100644 index 8bc8ebb..0000000 Binary files a/pkg/framework/core/docs/site/images/favicon.ico and /dev/null differ diff --git a/pkg/framework/core/docs/site/images/illustration.png b/pkg/framework/core/docs/site/images/illustration.png deleted file mode 100644 index 69f739c..0000000 Binary files a/pkg/framework/core/docs/site/images/illustration.png and /dev/null differ diff --git a/pkg/framework/core/docs/site/images/lethean-logo.png b/pkg/framework/core/docs/site/images/lethean-logo.png deleted file mode 100644 index 591019d..0000000 Binary files a/pkg/framework/core/docs/site/images/lethean-logo.png and /dev/null differ diff --git a/pkg/framework/core/docs/site/images/private-transaction-net.png b/pkg/framework/core/docs/site/images/private-transaction-net.png deleted file mode 100644 index 1eee17a..0000000 Binary files a/pkg/framework/core/docs/site/images/private-transaction-net.png and /dev/null differ diff --git a/pkg/framework/core/docs/site/images/secure-data-storage.jpg b/pkg/framework/core/docs/site/images/secure-data-storage.jpg deleted file mode 100644 index 395a8ae..0000000 Binary files a/pkg/framework/core/docs/site/images/secure-data-storage.jpg and /dev/null differ diff --git a/pkg/framework/core/docs/site/index.html 
b/pkg/framework/core/docs/site/index.html deleted file mode 100644 index a956691..0000000 --- a/pkg/framework/core/docs/site/index.html +++ /dev/null @@ -1,939 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - Core.Help - Core.Help - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- -
- - - - -
- - -
- -
- - - - - - - - - -
-
- - - -
-
-
- - - - - - - -
-
-
- - - -
-
-
- - - -
-
-
- - - -
- - - - - - - - - -
- - - - - - - -

Overview

-

Core is an opinionated framework for building Go desktop apps with Wails, providing a small set of focused modules you can mix into your app. It ships with sensible defaults and a demo app that doubles as in‑app help.

-
    -
  • Site: https://dappco.re
  • -
  • Repo: https://github.com/Snider/Core
  • -
-

Modules

-
    -
  • Core — framework bootstrap and service container
  • -
  • Core.Config — app and UI state persistence
  • -
  • Core.Crypt — keys, encrypt/decrypt, sign/verify
  • -
  • Core.Display — windows, tray, window state
  • -
  • Core.Docs — in‑app help and deep‑links
  • -
  • Core.IO — local/remote filesystem helpers
  • -
  • Core.Workspace — projects and paths
  • -
-

Quick start

-
package main
-
-import (
-    core "github.com/Snider/Core"
-)
-
-func main() {
-    app := core.New(
-        core.WithServiceLock(),
-    )
-    _ = app // start via Wails in your main package
-}
-
-

Services

-
package demo
-
-import (
-    core "github.com/Snider/Core"
-)
-
-// Register your service
-func Register(c *core.Core) error {
-    return c.RegisterModule("demo", &Demo{core: c})
-}
-
-

Display example

-
package display
-
-import (
-    "context"
-    "github.com/wailsapp/wails/v3/pkg/application"
-)
-
-// Open a window on startup
-func (d *API) ServiceStartup(ctx context.Context, _ application.ServiceOptions) error {
-    d.OpenWindow(
-        OptName("main"),
-        OptHeight(900),
-        OptWidth(1280),
-        OptURL("/"),
-        OptTitle("Core"),
-    )
-    return nil
-}
-
-

See the left nav for detailed pages on each module.

- - - - - - - - - - -
-
- - - - - -
- - - -
- -
- - -
- -
-
-
-
- - - - - - - - - \ No newline at end of file diff --git a/pkg/framework/core/docs/site/search/search_index.js b/pkg/framework/core/docs/site/search/search_index.js deleted file mode 100644 index 193f050..0000000 --- a/pkg/framework/core/docs/site/search/search_index.js +++ /dev/null @@ -1 +0,0 @@ -var __index = {"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"],"fields":{"title":{"boost":1000.0},"text":{"boost":1.0},"tags":{"boost":1000000.0}}},"docs":[{"location":"index.html","title":"Overview","text":"

Core is an opinionated framework for building Go desktop apps with Wails, providing a small set of focused modules you can mix into your app. It ships with sensible defaults and a demo app that doubles as in\u2011app help.

  • Site: https://dappco.re
  • Repo: https://github.com/Snider/Core
"},{"location":"index.html#modules","title":"Modules","text":"
  • Core \u2014 framework bootstrap and service container
  • Core.Config \u2014 app and UI state persistence
  • Core.Crypt \u2014 keys, encrypt/decrypt, sign/verify
  • Core.Display \u2014 windows, tray, window state
  • Core.Docs \u2014 in\u2011app help and deep\u2011links
  • Core.IO \u2014 local/remote filesystem helpers
  • Core.Workspace \u2014 projects and paths
"},{"location":"index.html#quick-start","title":"Quick start","text":"
package main\nimport (\ncore \"github.com/Snider/Core\"\n)\nfunc main() {\napp := core.New(\ncore.WithServiceLock(),\n)\n_ = app // start via Wails in your main package\n}\n
"},{"location":"index.html#services","title":"Services","text":"
package demo\nimport (\ncore \"github.com/Snider/Core\"\n)\n// Register your service\nfunc Register(c *core.Core) error {\nreturn c.RegisterModule(\"demo\", &Demo{core: c})\n}\n
"},{"location":"index.html#display-example","title":"Display example","text":"
package display\nimport (\n\"context\"\n\"github.com/wailsapp/wails/v3/pkg/application\"\n)\n// Open a window on startup\nfunc (d *API) ServiceStartup(ctx context.Context, _ application.ServiceOptions) error {\nd.OpenWindow(\nOptName(\"main\"),\nOptHeight(900),\nOptWidth(1280),\nOptURL(\"/\"),\nOptTitle(\"Core\"),\n)\nreturn nil\n}\n

See the left nav for detailed pages on each module.

"},{"location":"core/index.html","title":"Core","text":"

Short: Framework bootstrap and service container.

"},{"location":"core/index.html#what-it-is","title":"What it is","text":"

Core wires modules together, provides lifecycle hooks, and locks the service graph for clarity and safety.

"},{"location":"core/index.html#setup","title":"Setup","text":"
import \"github.com/Snider/Core\"\napp := core.New(\ncore.WithServiceLock(),\n)\n
"},{"location":"core/index.html#use","title":"Use","text":"
  • Register a module: core.RegisterModule(name, module)
  • Access a module: core.Mod[T](c, name)
  • Lock services: core.WithServiceLock()
"},{"location":"core/index.html#api","title":"API","text":"
  • New(opts ...) *core.Core
  • RegisterModule(name string, m any) error
  • Mod[T any](c *core.Core, name ...string) *T
"},{"location":"core/config.html","title":"Core.Config","text":"

Short: App config and UI state persistence.

"},{"location":"core/config.html#overview","title":"Overview","text":"

Stores and retrieves configuration, including window positions/sizes and user prefs.

"},{"location":"core/config.html#setup","title":"Setup","text":"
package main\nimport (\ncore \"github.com/Snider/Core\"\nconfig \"github.com/Snider/Core/config\"\n)\napp := core.New(\ncore.WithService(config.Register),\ncore.WithServiceLock(),\n)\n
"},{"location":"core/config.html#use","title":"Use","text":"
  • Persist UI state automatically when using Core.Display.
  • Read/write your own settings via the config API.
"},{"location":"core/config.html#api","title":"API","text":"
  • Register(c *core.Core) error
  • Get(path string, out any) error
  • Set(path string, v any) error
"},{"location":"core/crypt.html","title":"Core.Crypt","text":"

Short: Keys, encrypt/decrypt, sign/verify.

"},{"location":"core/crypt.html#overview","title":"Overview","text":"

Simple wrappers around OpenPGP for common crypto tasks.

"},{"location":"core/crypt.html#setup","title":"Setup","text":"
import (\ncore \"github.com/Snider/Core\"\ncrypt \"github.com/Snider/Core/crypt\"\n)\napp := core.New(\ncore.WithService(crypt.Register),\ncore.WithServiceLock(),\n)\n
"},{"location":"core/crypt.html#use","title":"Use","text":"
  • Generate keys
  • Encrypt/decrypt data
  • Sign/verify messages
"},{"location":"core/crypt.html#api","title":"API","text":"
  • Register(c *core.Core) error
  • GenerateKey(opts ...Option) (*Key, error)
  • Encrypt(pub *Key, data []byte) ([]byte, error)
  • Decrypt(priv *Key, data []byte) ([]byte, error)
  • Sign(priv *Key, data []byte) ([]byte, error)
  • Verify(pub *Key, data, sig []byte) error
"},{"location":"core/crypt.html#notes","title":"Notes","text":"
  • Uses ProtonMail OpenPGP fork.
"},{"location":"core/display.html","title":"Core.Display","text":"

Short: Windows, tray, and window state.

"},{"location":"core/display.html#overview","title":"Overview","text":"

Manages Wails windows, remembers positions/sizes, exposes JS bindings, and integrates with Core.Config for persistence.

"},{"location":"core/display.html#setup","title":"Setup","text":"
import (\ncore \"github.com/Snider/Core\"\ndisplay \"github.com/Snider/Core/display\"\n)\napp := core.New(\ncore.WithService(display.Register),\ncore.WithServiceLock(),\n)\n
"},{"location":"core/display.html#use","title":"Use","text":"
  • Open a window: OpenWindow(OptName(\"main\"), ...)
  • Get a window: Window(\"main\")
  • Save/restore state automatically when Core.Config is present
"},{"location":"core/display.html#api","title":"API","text":"
  • Register(c *core.Core) error
  • OpenWindow(opts ...Option) *Window
  • Window(name string) *Window
  • Options: OptName, OptWidth, OptHeight, OptURL, OptTitle
"},{"location":"core/display.html#example","title":"Example","text":"
func (d *API) ServiceStartup(ctx context.Context, _ application.ServiceOptions) error {\nd.OpenWindow(\nOptName(\"main\"), OptWidth(1280), OptHeight(900), OptURL(\"/\"), OptTitle(\"Core\"),\n)\nreturn nil\n}\n
"},{"location":"core/docs.html","title":"Core.Docs","text":"

Short: In\u2011app help and deep\u2011links.

"},{"location":"core/docs.html#overview","title":"Overview","text":"

Renders MkDocs content inside your app. Opens specific sections in new windows for contextual help.

"},{"location":"core/docs.html#setup","title":"Setup","text":"
import (\ncore \"github.com/Snider/Core\"\ndocs \"github.com/Snider/Core/docs\"\n)\napp := core.New(\ncore.WithService(docs.Register),\ncore.WithServiceLock(),\n)\n
"},{"location":"core/docs.html#use","title":"Use","text":"
  • Open docs home in a window: docs.Open()
  • Open a section: docs.OpenAt(\"core/display#setup\")
  • Use short, descriptive headings to create stable anchors.
"},{"location":"core/docs.html#api","title":"API","text":"
  • Register(c *core.Core) error
  • Open() \u2014 show docs home
  • OpenAt(anchor string) \u2014 open specific section
"},{"location":"core/docs.html#notes","title":"Notes","text":"
  • Docs are built with MkDocs Material and included in the demo app assets.
  • You are viewing Core.Docs right now, this Website is bundled into the app binary by default.
"},{"location":"core/io.html","title":"Core.IO","text":"

Short: Local/remote filesystem helpers.

"},{"location":"core/io.html#overview","title":"Overview","text":"

Abstracts filesystems (local, SFTP, WebDAV) behind a unified API for reading/writing and listing.

"},{"location":"core/io.html#setup","title":"Setup","text":"
import (\ncore \"github.com/Snider/Core\"\nioapi \"github.com/Snider/Core/filesystem\"\n)\napp := core.New(\ncore.WithService(ioapi.Register),\ncore.WithServiceLock(),\n)\n
"},{"location":"core/io.html#use","title":"Use","text":"
  • Open a filesystem: fs := ioapi.Local() or ioapi.SFTP(cfg)
  • Read/write files: fs.Read(path), fs.Write(path, data)
  • List directories: fs.List(path)
"},{"location":"core/io.html#api","title":"API","text":"
  • Register(c *core.Core) error
  • Local() FS
  • SFTP(cfg Config) (FS, error)
  • WebDAV(cfg Config) (FS, error)
"},{"location":"core/io.html#notes","title":"Notes","text":"
  • See package pkg/v1/core/filesystem/* for drivers.
"},{"location":"core/workspace.html","title":"Core.Workspace","text":"

Short: Projects and paths.

"},{"location":"core/workspace.html#overview","title":"Overview","text":"

Provides a consistent way to resolve app/project directories, temp/cache locations, and user data paths across platforms.

"},{"location":"core/workspace.html#setup","title":"Setup","text":"
import (\ncore \"github.com/Snider/Core\"\nworkspace \"github.com/Snider/Core/workspace\"\n)\napp := core.New(\ncore.WithService(workspace.Register),\ncore.WithServiceLock(),\n)\n
"},{"location":"core/workspace.html#use","title":"Use","text":"
  • Get app data dir: ws.DataDir()
  • Get cache dir: ws.CacheDir()
  • Resolve project path: ws.Project(\"my-app\")
"},{"location":"core/workspace.html#api","title":"API","text":"
  • Register(c *core.Core) error
  • DataDir() string
  • CacheDir() string
  • Project(name string) string
"},{"location":"core/workspace.html#notes","title":"Notes","text":"
  • Follows OS directory standards (AppData, ~/Library, XDG, etc.).
"}]} \ No newline at end of file diff --git a/pkg/framework/core/docs/site/search/search_index.json b/pkg/framework/core/docs/site/search/search_index.json deleted file mode 100644 index 323cc07..0000000 --- a/pkg/framework/core/docs/site/search/search_index.json +++ /dev/null @@ -1 +0,0 @@ -{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"],"fields":{"title":{"boost":1000.0},"text":{"boost":1.0},"tags":{"boost":1000000.0}}},"docs":[{"location":"index.html","title":"Overview","text":"

Core is an opinionated framework for building Go desktop apps with Wails, providing a small set of focused modules you can mix into your app. It ships with sensible defaults and a demo app that doubles as in\u2011app help.

  • Site: https://dappco.re
  • Repo: https://github.com/Snider/Core
"},{"location":"index.html#modules","title":"Modules","text":"
  • Core \u2014 framework bootstrap and service container
  • Core.Config \u2014 app and UI state persistence
  • Core.Crypt \u2014 keys, encrypt/decrypt, sign/verify
  • Core.Display \u2014 windows, tray, window state
  • Core.Docs \u2014 in\u2011app help and deep\u2011links
  • Core.IO \u2014 local/remote filesystem helpers
  • Core.Workspace \u2014 projects and paths
"},{"location":"index.html#quick-start","title":"Quick start","text":"
package main\nimport (\ncore \"github.com/Snider/Core\"\n)\nfunc main() {\napp := core.New(\ncore.WithServiceLock(),\n)\n_ = app // start via Wails in your main package\n}\n
"},{"location":"index.html#services","title":"Services","text":"
package demo\nimport (\ncore \"github.com/Snider/Core\"\n)\n// Register your service\nfunc Register(c *core.Core) error {\nreturn c.RegisterModule(\"demo\", &Demo{core: c})\n}\n
"},{"location":"index.html#display-example","title":"Display example","text":"
package display\nimport (\n\"context\"\n\"github.com/wailsapp/wails/v3/pkg/application\"\n)\n// Open a window on startup\nfunc (d *API) ServiceStartup(ctx context.Context, _ application.ServiceOptions) error {\nd.OpenWindow(\nOptName(\"main\"),\nOptHeight(900),\nOptWidth(1280),\nOptURL(\"/\"),\nOptTitle(\"Core\"),\n)\nreturn nil\n}\n

See the left nav for detailed pages on each module.

"},{"location":"core/index.html","title":"Core","text":"

Short: Framework bootstrap and service container.

"},{"location":"core/index.html#what-it-is","title":"What it is","text":"

Core wires modules together, provides lifecycle hooks, and locks the service graph for clarity and safety.

"},{"location":"core/index.html#setup","title":"Setup","text":"
import \"github.com/Snider/Core\"\napp := core.New(\ncore.WithServiceLock(),\n)\n
"},{"location":"core/index.html#use","title":"Use","text":"
  • Register a module: core.RegisterModule(name, module)
  • Access a module: core.Mod[T](c, name)
  • Lock services: core.WithServiceLock()
"},{"location":"core/index.html#api","title":"API","text":"
  • New(opts ...) *core.Core
  • RegisterModule(name string, m any) error
  • Mod[T any](c *core.Core, name ...string) *T
"},{"location":"core/config.html","title":"Core.Config","text":"

Short: App config and UI state persistence.

"},{"location":"core/config.html#overview","title":"Overview","text":"

Stores and retrieves configuration, including window positions/sizes and user prefs.

"},{"location":"core/config.html#setup","title":"Setup","text":"
package main\nimport (\ncore \"github.com/Snider/Core\"\nconfig \"github.com/Snider/Core/config\"\n)\napp := core.New(\ncore.WithService(config.Register),\ncore.WithServiceLock(),\n)\n
"},{"location":"core/config.html#use","title":"Use","text":"
  • Persist UI state automatically when using Core.Display.
  • Read/write your own settings via the config API.
"},{"location":"core/config.html#api","title":"API","text":"
  • Register(c *core.Core) error
  • Get(path string, out any) error
  • Set(path string, v any) error
"},{"location":"core/crypt.html","title":"Core.Crypt","text":"

Short: Keys, encrypt/decrypt, sign/verify.

"},{"location":"core/crypt.html#overview","title":"Overview","text":"

Simple wrappers around OpenPGP for common crypto tasks.

"},{"location":"core/crypt.html#setup","title":"Setup","text":"
import (\ncore \"github.com/Snider/Core\"\ncrypt \"github.com/Snider/Core/crypt\"\n)\napp := core.New(\ncore.WithService(crypt.Register),\ncore.WithServiceLock(),\n)\n
"},{"location":"core/crypt.html#use","title":"Use","text":"
  • Generate keys
  • Encrypt/decrypt data
  • Sign/verify messages
"},{"location":"core/crypt.html#api","title":"API","text":"
  • Register(c *core.Core) error
  • GenerateKey(opts ...Option) (*Key, error)
  • Encrypt(pub *Key, data []byte) ([]byte, error)
  • Decrypt(priv *Key, data []byte) ([]byte, error)
  • Sign(priv *Key, data []byte) ([]byte, error)
  • Verify(pub *Key, data, sig []byte) error
"},{"location":"core/crypt.html#notes","title":"Notes","text":"
  • Uses ProtonMail OpenPGP fork.
"},{"location":"core/display.html","title":"Core.Display","text":"

Short: Windows, tray, and window state.

"},{"location":"core/display.html#overview","title":"Overview","text":"

Manages Wails windows, remembers positions/sizes, exposes JS bindings, and integrates with Core.Config for persistence.

"},{"location":"core/display.html#setup","title":"Setup","text":"
import (\ncore \"github.com/Snider/Core\"\ndisplay \"github.com/Snider/Core/display\"\n)\napp := core.New(\ncore.WithService(display.Register),\ncore.WithServiceLock(),\n)\n
"},{"location":"core/display.html#use","title":"Use","text":"
  • Open a window: OpenWindow(OptName(\"main\"), ...)
  • Get a window: Window(\"main\")
  • Save/restore state automatically when Core.Config is present
"},{"location":"core/display.html#api","title":"API","text":"
  • Register(c *core.Core) error
  • OpenWindow(opts ...Option) *Window
  • Window(name string) *Window
  • Options: OptName, OptWidth, OptHeight, OptURL, OptTitle
"},{"location":"core/display.html#example","title":"Example","text":"
func (d *API) ServiceStartup(ctx context.Context, _ application.ServiceOptions) error {\nd.OpenWindow(\nOptName(\"main\"), OptWidth(1280), OptHeight(900), OptURL(\"/\"), OptTitle(\"Core\"),\n)\nreturn nil\n}\n
"},{"location":"core/docs.html","title":"Core.Docs","text":"

Short: In\u2011app help and deep\u2011links.

"},{"location":"core/docs.html#overview","title":"Overview","text":"

Renders MkDocs content inside your app. Opens specific sections in new windows for contextual help.

"},{"location":"core/docs.html#setup","title":"Setup","text":"
import (\ncore \"github.com/Snider/Core\"\ndocs \"github.com/Snider/Core/docs\"\n)\napp := core.New(\ncore.WithService(docs.Register),\ncore.WithServiceLock(),\n)\n
"},{"location":"core/docs.html#use","title":"Use","text":"
  • Open docs home in a window: docs.Open()
  • Open a section: docs.OpenAt(\"core/display#setup\")
  • Use short, descriptive headings to create stable anchors.
"},{"location":"core/docs.html#api","title":"API","text":"
  • Register(c *core.Core) error
  • Open() \u2014 show docs home
  • OpenAt(anchor string) \u2014 open specific section
"},{"location":"core/docs.html#notes","title":"Notes","text":"
  • Docs are built with MkDocs Material and included in the demo app assets.
  • You are viewing Core.Docs right now, this Website is bundled into the app binary by default.
"},{"location":"core/io.html","title":"Core.IO","text":"

Short: Local/remote filesystem helpers.

"},{"location":"core/io.html#overview","title":"Overview","text":"

Abstracts filesystems (local, SFTP, WebDAV) behind a unified API for reading/writing and listing.

"},{"location":"core/io.html#setup","title":"Setup","text":"
import (\ncore \"github.com/Snider/Core\"\nioapi \"github.com/Snider/Core/filesystem\"\n)\napp := core.New(\ncore.WithService(ioapi.Register),\ncore.WithServiceLock(),\n)\n
"},{"location":"core/io.html#use","title":"Use","text":"
  • Open a filesystem: fs := ioapi.Local() or ioapi.SFTP(cfg)
  • Read/write files: fs.Read(path), fs.Write(path, data)
  • List directories: fs.List(path)
"},{"location":"core/io.html#api","title":"API","text":"
  • Register(c *core.Core) error
  • Local() FS
  • SFTP(cfg Config) (FS, error)
  • WebDAV(cfg Config) (FS, error)
"},{"location":"core/io.html#notes","title":"Notes","text":"
  • See package pkg/v1/core/filesystem/* for drivers.
"},{"location":"core/workspace.html","title":"Core.Workspace","text":"

Short: Projects and paths.

"},{"location":"core/workspace.html#overview","title":"Overview","text":"

Provides a consistent way to resolve app/project directories, temp/cache locations, and user data paths across platforms.

"},{"location":"core/workspace.html#setup","title":"Setup","text":"
import (\ncore \"github.com/Snider/Core\"\nworkspace \"github.com/Snider/Core/workspace\"\n)\napp := core.New(\ncore.WithService(workspace.Register),\ncore.WithServiceLock(),\n)\n
"},{"location":"core/workspace.html#use","title":"Use","text":"
  • Get app data dir: ws.DataDir()
  • Get cache dir: ws.CacheDir()
  • Resolve project path: ws.Project(\"my-app\")
"},{"location":"core/workspace.html#api","title":"API","text":"
  • Register(c *core.Core) error
  • DataDir() string
  • CacheDir() string
  • Project(name string) string
"},{"location":"core/workspace.html#notes","title":"Notes","text":"
  • Follows OS directory standards (AppData, ~/Library, XDG, etc.).
"}]} \ No newline at end of file diff --git a/pkg/framework/core/docs/site/sitemap.xml b/pkg/framework/core/docs/site/sitemap.xml deleted file mode 100644 index a063358..0000000 --- a/pkg/framework/core/docs/site/sitemap.xml +++ /dev/null @@ -1,35 +0,0 @@ - - - - https://dappco.re/index.html - 2025-10-25 - - - https://dappco.re/core/index.html - 2025-10-25 - - - https://dappco.re/core/config.html - 2025-10-25 - - - https://dappco.re/core/crypt.html - 2025-10-25 - - - https://dappco.re/core/display.html - 2025-10-25 - - - https://dappco.re/core/docs.html - 2025-10-25 - - - https://dappco.re/core/io.html - 2025-10-25 - - - https://dappco.re/core/workspace.html - 2025-10-25 - - \ No newline at end of file diff --git a/pkg/framework/core/docs/site/sitemap.xml.gz b/pkg/framework/core/docs/site/sitemap.xml.gz deleted file mode 100644 index c4e06d9..0000000 Binary files a/pkg/framework/core/docs/site/sitemap.xml.gz and /dev/null differ diff --git a/pkg/framework/core/docs/site/stylesheets/extra.css b/pkg/framework/core/docs/site/stylesheets/extra.css deleted file mode 100644 index 8a89327..0000000 --- a/pkg/framework/core/docs/site/stylesheets/extra.css +++ /dev/null @@ -1,367 +0,0 @@ -[data-md-color-scheme="lethean"] { - --md-primary-fg-color: #0F131C; -} - -.hero-section { - background: linear-gradient(135deg, #0F131C 0%, #1a237e 100%); - color: white; - padding: 4rem 2rem; - text-align: center; - margin-bottom: 3rem; -} - -.hero-content { - max-width: 800px; - margin: 0 auto; -} - -.hero-content h1 { - font-size: 2.5rem; - margin-bottom: 1rem; - color: white; - text-shadow: 0 1px 2px rgba(0, 0, 0, 0.2); -} - -.hero-subtitle { - font-size: 1.25rem; - margin-bottom: 2rem; - opacity: 0.9; -} - -.hero-badges { - margin-bottom: 2rem; -} - -.badge { - background: rgba(255, 255, 255, 0.1); - padding: 0.5rem 1rem; - border-radius: 20px; - margin: 0 0.5rem; - font-size: 0.9rem; -} - -.cta-button { - display: inline-block; - background: #4A90E2; - color: white; - padding: 0.8rem 
2rem; - border-radius: 4px; - text-decoration: none; - font-weight: 500; - transition: all 0.3s; -} - -.cta-button:hover { - background: #357ABD; - color: white; - transform: translateY(-2px); -} - -.cta-button.secondary { - background: transparent; - border: 2px solid #4A90E2; - color: #4A90E2; -} - -.features-grid { - display: grid; - grid-template-columns: repeat(auto-fit, minmax(250px, 1fr)); - gap: 0.2rem; - padding: 0.2rem; - margin-bottom: 3rem; -} - -.feature-card { - background: white; - border-radius: 8px; - padding: 1.0rem; - border: 2px solid #e2e8f0; - box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1); - transition: all 0.3s; -} - -[data-md-color-scheme="slate"] .feature-card { - background: #2d3748; - border-color: #4a5568; - color: #e2e8f0; -} - -.feature-card:hover { - transform: translateY(-5px); - box-shadow: 0 6px 8px rgba(0, 0, 0, 0.15); -} - -.feature-card img { - width: 100%; - height: 150px; - object-fit: cover; - border-radius: 4px; - margin-bottom: 1rem; -} - -.feature-card h3 { - margin: 1rem 0; - color: #0F131C; - text-shadow: 0 1px 2px rgba(0, 0, 0, 0.2); -} - -[data-md-color-scheme="slate"] .feature-card h3 { - color: #e2e8f0; -} - -.get-started { - color: #4A90E2; - text-decoration: none; - font-weight: 500; -} - -.benefits-section { - background: #f5f5f5; - padding: 0.4rem 0.2rem; - text-align: center; - margin-bottom: 3rem; -} - -.benefits-section h2 { - font-size: 1.5rem; - font-weight: 600; - text-transform: uppercase; - letter-spacing: 1px; - margin-bottom: 0.5rem; - margin-top: 0.8rem; - text-shadow: 0 1px 2px rgba(0, 0, 0, 0.2); -} - -[data-md-color-scheme="slate"] .benefits-section { - background: #1a202c; - color: #e2e8f0; -} - -.benefits-grid { - display: grid; - grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); - gap: 0.2rem; - padding: 0.2rem; - margin: 0.2rem auto; -} - -.benefit-card { - background: white; - padding: 0.5rem; - border-radius: 8px; - border: 2px solid #e2e8f0; - box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1); - 
text-align: left; -} - -[data-md-color-scheme="slate"] .benefit-card { - background: #2d3748; - border-color: #4a5568; - color: #e2e8f0; -} - -.roadmap-section { - padding: 0.4rem 0.2rem; - max-width: 1200px; - margin: 0 auto; -} - -.timeline { - position: relative; - display: grid; - grid-template-columns: repeat(2, 1fr); - gap: 2rem; - margin: 2rem 0; -} - -.timeline-item { - background: white; - padding: 1.5rem; - border-radius: 8px; - border: 2px solid #e2e8f0; - box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1); - position: relative; - transition: all 0.3s; -} - -.timeline-item.completed { - grid-column: span 2; -} - -[data-md-color-scheme="slate"] .timeline-item { - background: #2d3748; - border-color: #4a5568; - color: #e2e8f0; -} - -.timeline-item:hover { - transform: translateY(-2px); - box-shadow: 0 6px 8px rgba(0, 0, 0, 0.15); -} - -.timeline-marker { - width: 20px; - height: 20px; - border-radius: 50%; - position: absolute; - top: -10px; - left: 50%; - transform: translateX(-50%); -} - -.timeline-item.planning .timeline-marker { - background: #718096; -} - -.timeline-item.in-progress .timeline-marker { - background: #4A90E2; -} - -.timeline-item.completed .timeline-marker { - background: #48BB78; -} - -.timeline-item ul { - list-style: none; - padding: 0; -} - -.timeline-item li { - margin: 0.5rem 0; - padding-left: 24px; - position: relative; -} - -.timeline-item li::before { - content: ""; - width: 12px; - height: 12px; - border-radius: 50%; - position: absolute; - left: 0; - top: 50%; - transform: translateY(-50%); -} - -.timeline-item li.planned::before { - background: #718096; -} - -.timeline-item li.active::before { - background: #4A90E2; -} - -.timeline-item li.completed::before { - background: #48BB78; -} - -.timeline-item li ul { - margin-top: 0.5rem; - margin-left: 1rem; -} - -.timeline-item li ul li { - font-size: 0.9rem; - margin: 0.25rem 0; -} - -.timeline-item li ul li::before { - width: 8px; - height: 8px; - background: #a0aec0; -} - 
-.timeline-item li ul li a { - color: #4A90E2; - text-decoration: none; - font-weight: 500; -} - -.timeline-item li ul li a:hover { - color: #357ABD; - text-decoration: underline; -} - -[data-md-color-scheme="slate"] .timeline-item li ul li a { - color: #63b3ed; -} - -[data-md-color-scheme="slate"] .timeline-item li ul li a:hover { - color: #90cdf4; -} - -.date { - font-size: 0.8rem; - color: #718096; - margin-left: 0.5rem; -} - -[data-md-color-scheme="slate"] .date { - color: #a0aec0; -} - -.cta-section { - background: #0F131C; - color: white; - padding: 4rem 2rem; - text-align: center; - margin-bottom: 3rem; -} - -.cta-buttons { - display: flex; - gap: 1rem; - justify-content: center; - margin-top: 2rem; -} - -.community-section { - padding: 4rem 2rem; - text-align: center; -} - -.community-links { - display: flex; - gap: 2rem; - justify-content: center; - margin-top: 2rem; -} - -.community-link { - color: #4A90E2; - text-decoration: none; - font-weight: 500; - transition: all 0.3s; -} - -.community-link:hover { - color: #357ABD; - transform: translateY(-2px); -} - -@media (max-width: 768px) { - .hero-content h1 { - font-size: 2rem; - } - - .timeline { - grid-template-columns: 1fr; - } - - .timeline-item.completed { - grid-column: auto; - } - - .features-grid { - grid-template-columns: 1fr; - } - - .cta-buttons { - flex-direction: column; - } - - .community-links { - flex-direction: column; - gap: 1rem; - } -} \ No newline at end of file diff --git a/pkg/framework/core/e.go b/pkg/framework/core/e.go deleted file mode 100644 index fb1a1e4..0000000 --- a/pkg/framework/core/e.go +++ /dev/null @@ -1,59 +0,0 @@ -// Package core provides a standardized error handling mechanism for the Core library. -// It allows for wrapping errors with contextual information, making it easier to -// trace the origin of an error and provide meaningful feedback. 
-// -// The design of this package is influenced by the need for a simple, yet powerful -// way to handle errors that can occur in different layers of the application, -// from low-level file operations to high-level service interactions. -// -// The key features of this package are: -// - Error wrapping: The Op and an optional Msg field provide context about -// where and why an error occurred. -// - Stack traces: By wrapping errors, we can build a logical stack trace -// that is more informative than a raw stack trace. -// - Consistent error handling: Encourages a uniform approach to error -// handling across the entire codebase. -package core - -import ( - "fmt" -) - -// Error represents a standardized error with operational context. -type Error struct { - // Op is the operation being performed, e.g., "config.Load". - Op string - // Msg is a human-readable message explaining the error. - Msg string - // Err is the underlying error that was wrapped. - Err error -} - -// E is a helper function to create a new Error. -// This is the primary way to create errors that will be consumed by the system. -// For example: -// -// return e.E("config.Load", "failed to load config file", err) -// -// The 'op' parameter should be in the format of 'package.function' or 'service.method'. -// The 'msg' parameter should be a human-readable message that can be displayed to the user. -// The 'err' parameter is the underlying error that is being wrapped. -func E(op, msg string, err error) error { - if err == nil { - return &Error{Op: op, Msg: msg} - } - return &Error{Op: op, Msg: msg, Err: err} -} - -// Error returns the string representation of the error. -func (e *Error) Error() string { - if e.Err != nil { - return fmt.Sprintf("%s: %s: %v", e.Op, e.Msg, e.Err) - } - return fmt.Sprintf("%s: %s", e.Op, e.Msg) -} - -// Unwrap provides compatibility for Go's errors.Is and errors.As functions. 
-func (e *Error) Unwrap() error { - return e.Err -} \ No newline at end of file diff --git a/pkg/framework/core/e_test.go b/pkg/framework/core/e_test.go deleted file mode 100644 index 71b04c0..0000000 --- a/pkg/framework/core/e_test.go +++ /dev/null @@ -1,29 +0,0 @@ -package core - -import ( - "errors" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestE_Good(t *testing.T) { - err := E("test.op", "test message", assert.AnError) - assert.Error(t, err) - assert.Equal(t, "test.op: test message: assert.AnError general error for testing", err.Error()) - - err = E("test.op", "test message", nil) - assert.Error(t, err) - assert.Equal(t, "test.op: test message", err.Error()) -} - -func TestE_Unwrap(t *testing.T) { - originalErr := errors.New("original error") - err := E("test.op", "test message", originalErr) - - assert.True(t, errors.Is(err, originalErr)) - - var eErr *Error - assert.True(t, errors.As(err, &eErr)) - assert.Equal(t, "test.op", eErr.Op) -} diff --git a/pkg/framework/core/interfaces.go b/pkg/framework/core/interfaces.go deleted file mode 100644 index f382cff..0000000 --- a/pkg/framework/core/interfaces.go +++ /dev/null @@ -1,122 +0,0 @@ -package core - -import ( - "context" - "embed" - "sync" -) - -// This file defines the public API contracts (interfaces) for the services -// in the Core framework. Services depend on these interfaces, not on -// concrete implementations. - -// Contract specifies the operational guarantees that the Core and its services must adhere to. -// This is used for configuring panic handling and other resilience features. -type Contract struct { - // DontPanic, if true, instructs the Core to recover from panics and return an error instead. - DontPanic bool - // DisableLogging, if true, disables all logging from the Core and its services. - DisableLogging bool -} - -// Features provides a way to check if a feature is enabled. -// This is used for feature flagging and conditional logic. 
-type Features struct { - // Flags is a list of enabled feature flags. - Flags []string -} - -// IsEnabled returns true if the given feature is enabled. -func (f *Features) IsEnabled(feature string) bool { - for _, flag := range f.Flags { - if flag == feature { - return true - } - } - return false -} - -// Option is a function that configures the Core. -// This is used to apply settings and register services during initialization. -type Option func(*Core) error - -// Message is the interface for all messages that can be sent through the Core's IPC system. -// Any struct can be a message, allowing for structured data to be passed between services. -// Used with ACTION for fire-and-forget broadcasts. -type Message interface{} - -// Query is the interface for read-only requests that return data. -// Used with QUERY (first responder) or QUERYALL (all responders). -type Query interface{} - -// Task is the interface for requests that perform side effects. -// Used with PERFORM (first responder executes). -type Task interface{} - -// QueryHandler handles Query requests. Returns (result, handled, error). -// If handled is false, the query will be passed to the next handler. -type QueryHandler func(*Core, Query) (any, bool, error) - -// TaskHandler handles Task requests. Returns (result, handled, error). -// If handled is false, the task will be passed to the next handler. -type TaskHandler func(*Core, Task) (any, bool, error) - -// Startable is an interface for services that need to perform initialization. -type Startable interface { - OnStartup(ctx context.Context) error -} - -// Stoppable is an interface for services that need to perform cleanup. -type Stoppable interface { - OnShutdown(ctx context.Context) error -} - -// Core is the central application object that manages services, assets, and communication. 
-type Core struct { - once sync.Once - initErr error - App any // GUI runtime (e.g., Wails App) - set by WithApp option - assets embed.FS - Features *Features - serviceLock bool - ipcMu sync.RWMutex - ipcHandlers []func(*Core, Message) error - queryMu sync.RWMutex - queryHandlers []QueryHandler - taskMu sync.RWMutex - taskHandlers []TaskHandler - serviceMu sync.RWMutex - services map[string]any - servicesLocked bool - startables []Startable - stoppables []Stoppable -} - -var instance *Core - -// Config provides access to application configuration. -type Config interface { - // Get retrieves a configuration value by key and stores it in the 'out' variable. - Get(key string, out any) error - // Set stores a configuration value by key. - Set(key string, v any) error -} - -// WindowOption is an interface for applying configuration options to a window. -type WindowOption interface { - Apply(any) -} - -// Display provides access to windowing and visual elements. -type Display interface { - // OpenWindow creates a new window with the given options. - OpenWindow(opts ...WindowOption) error -} - -// ActionServiceStartup is a message sent when the application's services are starting up. -// This provides a hook for services to perform initialization tasks. -type ActionServiceStartup struct{} - -// ActionServiceShutdown is a message sent when the application is shutting down. -// This allows services to perform cleanup tasks, such as saving state or closing resources. 
-type ActionServiceShutdown struct{} diff --git a/pkg/framework/core/ipc_test.go b/pkg/framework/core/ipc_test.go deleted file mode 100644 index 87b6570..0000000 --- a/pkg/framework/core/ipc_test.go +++ /dev/null @@ -1,77 +0,0 @@ -package core - -import ( - "errors" - "testing" - - "github.com/stretchr/testify/assert" -) - -type IPCTestQuery struct{ Value string } -type IPCTestTask struct{ Value string } - -func TestIPC_Query(t *testing.T) { - c, _ := New() - - // No handler - res, handled, err := c.QUERY(IPCTestQuery{}) - assert.False(t, handled) - assert.Nil(t, res) - assert.Nil(t, err) - - // With handler - c.RegisterQuery(func(c *Core, q Query) (any, bool, error) { - if tq, ok := q.(IPCTestQuery); ok { - return tq.Value + "-response", true, nil - } - return nil, false, nil - }) - - res, handled, err = c.QUERY(IPCTestQuery{Value: "test"}) - assert.True(t, handled) - assert.Nil(t, err) - assert.Equal(t, "test-response", res) -} - -func TestIPC_QueryAll(t *testing.T) { - c, _ := New() - - c.RegisterQuery(func(c *Core, q Query) (any, bool, error) { - return "h1", true, nil - }) - c.RegisterQuery(func(c *Core, q Query) (any, bool, error) { - return "h2", true, nil - }) - - results, err := c.QUERYALL(IPCTestQuery{}) - assert.Nil(t, err) - assert.Len(t, results, 2) - assert.Contains(t, results, "h1") - assert.Contains(t, results, "h2") -} - -func TestIPC_Perform(t *testing.T) { - c, _ := New() - - c.RegisterTask(func(c *Core, task Task) (any, bool, error) { - if tt, ok := task.(IPCTestTask); ok { - if tt.Value == "error" { - return nil, true, errors.New("task error") - } - return "done", true, nil - } - return nil, false, nil - }) - - // Success - res, handled, err := c.PERFORM(IPCTestTask{Value: "run"}) - assert.True(t, handled) - assert.Nil(t, err) - assert.Equal(t, "done", res) - - // Error - res, handled, err = c.PERFORM(IPCTestTask{Value: "error"}) - assert.True(t, handled) - assert.Error(t, err) - assert.Nil(t, res) -} diff --git 
a/pkg/framework/core/query_test.go b/pkg/framework/core/query_test.go deleted file mode 100644 index 43b00fb..0000000 --- a/pkg/framework/core/query_test.go +++ /dev/null @@ -1,201 +0,0 @@ -package core - -import ( - "errors" - "testing" - - "github.com/stretchr/testify/assert" -) - -type TestQuery struct { - Value string -} - -type TestTask struct { - Value string -} - -func TestCore_QUERY_Good(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - // Register a handler that responds to TestQuery - c.RegisterQuery(func(c *Core, q Query) (any, bool, error) { - if tq, ok := q.(TestQuery); ok { - return "result-" + tq.Value, true, nil - } - return nil, false, nil - }) - - result, handled, err := c.QUERY(TestQuery{Value: "test"}) - assert.NoError(t, err) - assert.True(t, handled) - assert.Equal(t, "result-test", result) -} - -func TestCore_QUERY_NotHandled(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - // No handlers registered - result, handled, err := c.QUERY(TestQuery{Value: "test"}) - assert.NoError(t, err) - assert.False(t, handled) - assert.Nil(t, result) -} - -func TestCore_QUERY_FirstResponder(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - // First handler responds - c.RegisterQuery(func(c *Core, q Query) (any, bool, error) { - return "first", true, nil - }) - - // Second handler would respond but shouldn't be called - c.RegisterQuery(func(c *Core, q Query) (any, bool, error) { - return "second", true, nil - }) - - result, handled, err := c.QUERY(TestQuery{}) - assert.NoError(t, err) - assert.True(t, handled) - assert.Equal(t, "first", result) -} - -func TestCore_QUERY_SkipsNonHandlers(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - // First handler doesn't handle - c.RegisterQuery(func(c *Core, q Query) (any, bool, error) { - return nil, false, nil - }) - - // Second handler responds - c.RegisterQuery(func(c *Core, q Query) (any, bool, error) { - return "second", true, nil - }) - - result, handled, err := 
c.QUERY(TestQuery{}) - assert.NoError(t, err) - assert.True(t, handled) - assert.Equal(t, "second", result) -} - -func TestCore_QUERYALL_Good(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - // Multiple handlers respond - c.RegisterQuery(func(c *Core, q Query) (any, bool, error) { - return "first", true, nil - }) - - c.RegisterQuery(func(c *Core, q Query) (any, bool, error) { - return "second", true, nil - }) - - c.RegisterQuery(func(c *Core, q Query) (any, bool, error) { - return nil, false, nil // Doesn't handle - }) - - results, err := c.QUERYALL(TestQuery{}) - assert.NoError(t, err) - assert.Len(t, results, 2) - assert.Contains(t, results, "first") - assert.Contains(t, results, "second") -} - -func TestCore_QUERYALL_AggregatesErrors(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - err1 := errors.New("error1") - err2 := errors.New("error2") - - c.RegisterQuery(func(c *Core, q Query) (any, bool, error) { - return "result1", true, err1 - }) - - c.RegisterQuery(func(c *Core, q Query) (any, bool, error) { - return "result2", true, err2 - }) - - results, err := c.QUERYALL(TestQuery{}) - assert.Error(t, err) - assert.ErrorIs(t, err, err1) - assert.ErrorIs(t, err, err2) - assert.Len(t, results, 2) -} - -func TestCore_PERFORM_Good(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - executed := false - c.RegisterTask(func(c *Core, t Task) (any, bool, error) { - if tt, ok := t.(TestTask); ok { - executed = true - return "done-" + tt.Value, true, nil - } - return nil, false, nil - }) - - result, handled, err := c.PERFORM(TestTask{Value: "work"}) - assert.NoError(t, err) - assert.True(t, handled) - assert.True(t, executed) - assert.Equal(t, "done-work", result) -} - -func TestCore_PERFORM_NotHandled(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - // No handlers registered - result, handled, err := c.PERFORM(TestTask{Value: "work"}) - assert.NoError(t, err) - assert.False(t, handled) - assert.Nil(t, result) -} - -func 
TestCore_PERFORM_FirstResponder(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - callCount := 0 - - c.RegisterTask(func(c *Core, t Task) (any, bool, error) { - callCount++ - return "first", true, nil - }) - - c.RegisterTask(func(c *Core, t Task) (any, bool, error) { - callCount++ - return "second", true, nil - }) - - result, handled, err := c.PERFORM(TestTask{}) - assert.NoError(t, err) - assert.True(t, handled) - assert.Equal(t, "first", result) - assert.Equal(t, 1, callCount) // Only first handler called -} - -func TestCore_PERFORM_WithError(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - expectedErr := errors.New("task failed") - c.RegisterTask(func(c *Core, t Task) (any, bool, error) { - return nil, true, expectedErr - }) - - result, handled, err := c.PERFORM(TestTask{}) - assert.Error(t, err) - assert.ErrorIs(t, err, expectedErr) - assert.True(t, handled) - assert.Nil(t, result) -} diff --git a/pkg/framework/core/runtime_pkg.go b/pkg/framework/core/runtime_pkg.go deleted file mode 100644 index 71199f6..0000000 --- a/pkg/framework/core/runtime_pkg.go +++ /dev/null @@ -1,112 +0,0 @@ -package core - -import ( - "context" - "fmt" - "sort" -) - -// ServiceRuntime is a helper struct embedded in services to provide access to the core application. -// It is generic and can be parameterized with a service-specific options struct. -type ServiceRuntime[T any] struct { - core *Core - opts T -} - -// NewServiceRuntime creates a new ServiceRuntime instance for a service. -// This is typically called by a service's constructor. -func NewServiceRuntime[T any](c *Core, opts T) *ServiceRuntime[T] { - return &ServiceRuntime[T]{ - core: c, - opts: opts, - } -} - -// Core returns the central core instance, providing access to all registered services. -func (r *ServiceRuntime[T]) Core() *Core { - return r.core -} - -// Opts returns the service-specific options. 
-func (r *ServiceRuntime[T]) Opts() T { - return r.opts -} - -// Config returns the registered Config service from the core application. -// This is a convenience method for accessing the application's configuration. -func (r *ServiceRuntime[T]) Config() Config { - return r.core.Config() -} - -// Runtime is the container that holds all instantiated services. -// Its fields are the concrete types, allowing GUI runtimes to bind them directly. -// This struct is the primary entry point for the application. -type Runtime struct { - app any // GUI runtime (e.g., Wails App) - Core *Core -} - -// ServiceFactory defines a function that creates a service instance. -// This is used to decouple the service creation from the runtime initialization. -type ServiceFactory func() (any, error) - -// NewWithFactories creates a new Runtime instance using the provided service factories. -// This is the most flexible way to create a new Runtime, as it allows for -// the registration of any number of services. -func NewWithFactories(app any, factories map[string]ServiceFactory) (*Runtime, error) { - coreOpts := []Option{ - WithApp(app), - } - - names := make([]string, 0, len(factories)) - for name := range factories { - names = append(names, name) - } - sort.Strings(names) - - for _, name := range names { - factory := factories[name] - svc, err := factory() - if err != nil { - return nil, fmt.Errorf("failed to create service %s: %w", name, err) - } - svcCopy := svc - coreOpts = append(coreOpts, WithName(name, func(c *Core) (any, error) { return svcCopy, nil })) - } - - coreInstance, err := New(coreOpts...) - if err != nil { - return nil, err - } - - return &Runtime{ - app: app, - Core: coreInstance, - }, nil -} - -// NewRuntime creates and wires together all application services. -// This is the simplest way to create a new Runtime, but it does not allow for -// the registration of any custom services. 
-func NewRuntime(app any) (*Runtime, error) { - return NewWithFactories(app, map[string]ServiceFactory{}) -} - -// ServiceName returns the name of the service. This is used by GUI runtimes to identify the service. -func (r *Runtime) ServiceName() string { - return "Core" -} - -// ServiceStartup is called by the GUI runtime at application startup. -// This is where the Core's startup lifecycle is initiated. -func (r *Runtime) ServiceStartup(ctx context.Context, options any) { - r.Core.ServiceStartup(ctx, options) -} - -// ServiceShutdown is called by the GUI runtime at application shutdown. -// This is where the Core's shutdown lifecycle is initiated. -func (r *Runtime) ServiceShutdown(ctx context.Context) { - if r.Core != nil { - r.Core.ServiceShutdown(ctx) - } -} diff --git a/pkg/framework/core/runtime_pkg_extra_test.go b/pkg/framework/core/runtime_pkg_extra_test.go deleted file mode 100644 index c63a4a1..0000000 --- a/pkg/framework/core/runtime_pkg_extra_test.go +++ /dev/null @@ -1,18 +0,0 @@ -package core - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestNewWithFactories_EmptyName(t *testing.T) { - factories := map[string]ServiceFactory{ - "": func() (any, error) { - return &MockService{Name: "test"}, nil - }, - } - _, err := NewWithFactories(nil, factories) - assert.Error(t, err) - assert.Contains(t, err.Error(), "service name cannot be empty") -} diff --git a/pkg/framework/core/runtime_pkg_test.go b/pkg/framework/core/runtime_pkg_test.go deleted file mode 100644 index 0600d81..0000000 --- a/pkg/framework/core/runtime_pkg_test.go +++ /dev/null @@ -1,127 +0,0 @@ -package core - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestNewRuntime(t *testing.T) { - testCases := []struct { - name string - app any - factories map[string]ServiceFactory - expectErr bool - expectErrStr string - checkRuntime func(*testing.T, *Runtime) - }{ - { - name: "Good path", - app: nil, - factories: map[string]ServiceFactory{}, - 
expectErr: false, - checkRuntime: func(t *testing.T, rt *Runtime) { - assert.NotNil(t, rt) - assert.NotNil(t, rt.Core) - }, - }, - { - name: "With non-nil app", - app: &mockApp{}, - factories: map[string]ServiceFactory{}, - expectErr: false, - checkRuntime: func(t *testing.T, rt *Runtime) { - assert.NotNil(t, rt) - assert.NotNil(t, rt.Core) - assert.NotNil(t, rt.Core.App) - }, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - rt, err := NewRuntime(tc.app) - - if tc.expectErr { - assert.Error(t, err) - assert.Contains(t, err.Error(), tc.expectErrStr) - assert.Nil(t, rt) - } else { - assert.NoError(t, err) - if tc.checkRuntime != nil { - tc.checkRuntime(t, rt) - } - } - }) - } -} - -func TestNewWithFactories_Good(t *testing.T) { - factories := map[string]ServiceFactory{ - "test": func() (any, error) { - return &MockService{Name: "test"}, nil - }, - } - rt, err := NewWithFactories(nil, factories) - assert.NoError(t, err) - assert.NotNil(t, rt) - svc := rt.Core.Service("test") - assert.NotNil(t, svc) - mockSvc, ok := svc.(*MockService) - assert.True(t, ok) - assert.Equal(t, "test", mockSvc.Name) -} - -func TestNewWithFactories_Bad(t *testing.T) { - factories := map[string]ServiceFactory{ - "test": func() (any, error) { - return nil, assert.AnError - }, - } - _, err := NewWithFactories(nil, factories) - assert.Error(t, err) - assert.ErrorIs(t, err, assert.AnError) -} - -func TestNewWithFactories_Ugly(t *testing.T) { - factories := map[string]ServiceFactory{ - "test": nil, - } - assert.Panics(t, func() { - _, _ = NewWithFactories(nil, factories) - }) -} - -func TestRuntime_Lifecycle_Good(t *testing.T) { - rt, err := NewRuntime(nil) - assert.NoError(t, err) - assert.NotNil(t, rt) - - // ServiceName - assert.Equal(t, "Core", rt.ServiceName()) - - // ServiceStartup & ServiceShutdown - // These are simple wrappers around the core methods, which are tested in core_test.go. - // We call them here to ensure coverage. 
- rt.ServiceStartup(nil, nil) - rt.ServiceShutdown(nil) - - // Test shutdown with nil core - rt.Core = nil - rt.ServiceShutdown(nil) -} - -func TestNewServiceRuntime_Good(t *testing.T) { - c, err := New() - assert.NoError(t, err) - - sr := NewServiceRuntime(c, "test options") - assert.NotNil(t, sr) - assert.Equal(t, c, sr.Core()) - - // We can't directly test sr.Config() without a registered config service, - // but we can ensure it doesn't panic. We'll test the panic case separately. - assert.Panics(t, func() { - sr.Config() - }) -} diff --git a/pkg/framework/framework.go b/pkg/framework/framework.go deleted file mode 100644 index 1ce53fb..0000000 --- a/pkg/framework/framework.go +++ /dev/null @@ -1,71 +0,0 @@ -// Package framework provides the Core DI/service framework. -// Import this package for cleaner access to the framework types. -// -// Usage: -// -// import "github.com/host-uk/core/pkg/framework" -// -// app, _ := framework.New( -// framework.WithServiceLock(), -// ) -package framework - -import ( - "github.com/host-uk/core/pkg/framework/core" -) - -// Re-export core types for cleaner imports -type ( - Core = core.Core - Option = core.Option - Message = core.Message - Query = core.Query - Task = core.Task - QueryHandler = core.QueryHandler - TaskHandler = core.TaskHandler - Startable = core.Startable - Stoppable = core.Stoppable - Config = core.Config - Display = core.Display - WindowOption = core.WindowOption - Features = core.Features - Contract = core.Contract - Error = core.Error - ServiceRuntime[T any] = core.ServiceRuntime[T] - Runtime = core.Runtime - ServiceFactory = core.ServiceFactory -) - -// Re-export core functions -var ( - New = core.New - WithService = core.WithService - WithName = core.WithName - WithApp = core.WithApp - WithAssets = core.WithAssets - WithServiceLock = core.WithServiceLock - App = core.App - E = core.E - NewRuntime = core.NewRuntime - NewWithFactories = core.NewWithFactories -) - -// NewServiceRuntime creates a new 
ServiceRuntime for a service. -func NewServiceRuntime[T any](c *Core, opts T) *ServiceRuntime[T] { - return core.NewServiceRuntime(c, opts) -} - -// Re-export generic functions -func ServiceFor[T any](c *Core, name string) (T, error) { - return core.ServiceFor[T](c, name) -} - -func MustServiceFor[T any](c *Core, name string) T { - return core.MustServiceFor[T](c, name) -} - -// Action types -type ( - ActionServiceStartup = core.ActionServiceStartup - ActionServiceShutdown = core.ActionServiceShutdown -) diff --git a/pkg/git/git.go b/pkg/git/git.go deleted file mode 100644 index 0081737..0000000 --- a/pkg/git/git.go +++ /dev/null @@ -1,263 +0,0 @@ -// Package git provides utilities for git operations across multiple repositories. -package git - -import ( - "bytes" - "context" - "io" - "os" - "os/exec" - "strconv" - "strings" - "sync" -) - -// RepoStatus represents the git status of a single repository. -type RepoStatus struct { - Name string - Path string - Modified int - Untracked int - Staged int - Ahead int - Behind int - Branch string - Error error -} - -// IsDirty returns true if there are uncommitted changes. -func (s *RepoStatus) IsDirty() bool { - return s.Modified > 0 || s.Untracked > 0 || s.Staged > 0 -} - -// HasUnpushed returns true if there are commits to push. -func (s *RepoStatus) HasUnpushed() bool { - return s.Ahead > 0 -} - -// HasUnpulled returns true if there are commits to pull. -func (s *RepoStatus) HasUnpulled() bool { - return s.Behind > 0 -} - -// StatusOptions configures the status check. -type StatusOptions struct { - // Paths is a list of repo paths to check - Paths []string - // Names maps paths to display names - Names map[string]string -} - -// Status checks git status for multiple repositories in parallel. 
-func Status(ctx context.Context, opts StatusOptions) []RepoStatus { - var wg sync.WaitGroup - results := make([]RepoStatus, len(opts.Paths)) - - for i, path := range opts.Paths { - wg.Add(1) - go func(idx int, repoPath string) { - defer wg.Done() - name := opts.Names[repoPath] - if name == "" { - name = repoPath - } - results[idx] = getStatus(ctx, repoPath, name) - }(i, path) - } - - wg.Wait() - return results -} - -// getStatus gets the git status for a single repository. -func getStatus(ctx context.Context, path, name string) RepoStatus { - status := RepoStatus{ - Name: name, - Path: path, - } - - // Get current branch - branch, err := gitCommand(ctx, path, "rev-parse", "--abbrev-ref", "HEAD") - if err != nil { - status.Error = err - return status - } - status.Branch = strings.TrimSpace(branch) - - // Get porcelain status - porcelain, err := gitCommand(ctx, path, "status", "--porcelain") - if err != nil { - status.Error = err - return status - } - - // Parse status output - for _, line := range strings.Split(porcelain, "\n") { - if len(line) < 2 { - continue - } - x, y := line[0], line[1] - - // Untracked - if x == '?' && y == '?' { - status.Untracked++ - continue - } - - // Staged (index has changes) - if x == 'A' || x == 'D' || x == 'R' || x == 'M' { - status.Staged++ - } - - // Modified in working tree - if y == 'M' || y == 'D' { - status.Modified++ - } - } - - // Get ahead/behind counts - ahead, behind := getAheadBehind(ctx, path) - status.Ahead = ahead - status.Behind = behind - - return status -} - -// getAheadBehind returns the number of commits ahead and behind upstream. 
-func getAheadBehind(ctx context.Context, path string) (ahead, behind int) { - // Try to get ahead count - aheadStr, err := gitCommand(ctx, path, "rev-list", "--count", "@{u}..HEAD") - if err == nil { - ahead, _ = strconv.Atoi(strings.TrimSpace(aheadStr)) - } - - // Try to get behind count - behindStr, err := gitCommand(ctx, path, "rev-list", "--count", "HEAD..@{u}") - if err == nil { - behind, _ = strconv.Atoi(strings.TrimSpace(behindStr)) - } - - return ahead, behind -} - -// Push pushes commits for a single repository. -// Uses interactive mode to support SSH passphrase prompts. -func Push(ctx context.Context, path string) error { - return gitInteractive(ctx, path, "push") -} - -// Pull pulls changes for a single repository. -// Uses interactive mode to support SSH passphrase prompts. -func Pull(ctx context.Context, path string) error { - return gitInteractive(ctx, path, "pull", "--rebase") -} - -// IsNonFastForward checks if an error is a non-fast-forward rejection. -func IsNonFastForward(err error) bool { - if err == nil { - return false - } - msg := err.Error() - return strings.Contains(msg, "non-fast-forward") || - strings.Contains(msg, "fetch first") || - strings.Contains(msg, "tip of your current branch is behind") -} - -// gitInteractive runs a git command with terminal attached for user interaction. -func gitInteractive(ctx context.Context, dir string, args ...string) error { - cmd := exec.CommandContext(ctx, "git", args...) - cmd.Dir = dir - - // Connect to terminal for SSH passphrase prompts - cmd.Stdin = os.Stdin - cmd.Stdout = os.Stdout - - // Capture stderr for error reporting while also showing it - var stderr bytes.Buffer - cmd.Stderr = io.MultiWriter(os.Stderr, &stderr) - - if err := cmd.Run(); err != nil { - if stderr.Len() > 0 { - return &GitError{Err: err, Stderr: stderr.String()} - } - return err - } - - return nil -} - -// PushResult represents the result of a push operation. 
-type PushResult struct { - Name string - Path string - Success bool - Error error -} - -// PushMultiple pushes multiple repositories sequentially. -// Sequential because SSH passphrase prompts need user interaction. -func PushMultiple(ctx context.Context, paths []string, names map[string]string) []PushResult { - results := make([]PushResult, len(paths)) - - for i, path := range paths { - name := names[path] - if name == "" { - name = path - } - - result := PushResult{ - Name: name, - Path: path, - } - - err := Push(ctx, path) - if err != nil { - result.Error = err - } else { - result.Success = true - } - - results[i] = result - } - - return results -} - -// gitCommand runs a git command and returns stdout. -func gitCommand(ctx context.Context, dir string, args ...string) (string, error) { - cmd := exec.CommandContext(ctx, "git", args...) - cmd.Dir = dir - - var stdout, stderr bytes.Buffer - cmd.Stdout = &stdout - cmd.Stderr = &stderr - - if err := cmd.Run(); err != nil { - // Include stderr in error message for better diagnostics - if stderr.Len() > 0 { - return "", &GitError{Err: err, Stderr: stderr.String()} - } - return "", err - } - - return stdout.String(), nil -} - -// GitError wraps a git command error with stderr output. -type GitError struct { - Err error - Stderr string -} - -func (e *GitError) Error() string { - // Return just the stderr message, trimmed - msg := strings.TrimSpace(e.Stderr) - if msg != "" { - return msg - } - return e.Err.Error() -} - -func (e *GitError) Unwrap() error { - return e.Err -} diff --git a/pkg/git/service.go b/pkg/git/service.go deleted file mode 100644 index 2ed11da..0000000 --- a/pkg/git/service.go +++ /dev/null @@ -1,129 +0,0 @@ -package git - -import ( - "context" - - "github.com/host-uk/core/pkg/framework" -) - -// Queries for git service - -// QueryStatus requests git status for paths. -type QueryStatus struct { - Paths []string - Names map[string]string -} - -// QueryDirtyRepos requests repos with uncommitted changes. 
-type QueryDirtyRepos struct{} - -// QueryAheadRepos requests repos with unpushed commits. -type QueryAheadRepos struct{} - -// Tasks for git service - -// TaskPush requests git push for a path. -type TaskPush struct { - Path string - Name string -} - -// TaskPull requests git pull for a path. -type TaskPull struct { - Path string - Name string -} - -// TaskPushMultiple requests git push for multiple paths. -type TaskPushMultiple struct { - Paths []string - Names map[string]string -} - -// ServiceOptions for configuring the git service. -type ServiceOptions struct { - WorkDir string -} - -// Service provides git operations as a Core service. -type Service struct { - *framework.ServiceRuntime[ServiceOptions] - lastStatus []RepoStatus -} - -// NewService creates a git service factory. -func NewService(opts ServiceOptions) func(*framework.Core) (any, error) { - return func(c *framework.Core) (any, error) { - return &Service{ - ServiceRuntime: framework.NewServiceRuntime(c, opts), - }, nil - } -} - -// OnStartup registers query and task handlers. 
-func (s *Service) OnStartup(ctx context.Context) error { - s.Core().RegisterQuery(s.handleQuery) - s.Core().RegisterTask(s.handleTask) - return nil -} - -func (s *Service) handleQuery(c *framework.Core, q framework.Query) (any, bool, error) { - switch m := q.(type) { - case QueryStatus: - statuses := Status(context.Background(), StatusOptions{ - Paths: m.Paths, - Names: m.Names, - }) - s.lastStatus = statuses - return statuses, true, nil - - case QueryDirtyRepos: - return s.DirtyRepos(), true, nil - - case QueryAheadRepos: - return s.AheadRepos(), true, nil - } - return nil, false, nil -} - -func (s *Service) handleTask(c *framework.Core, t framework.Task) (any, bool, error) { - switch m := t.(type) { - case TaskPush: - err := Push(context.Background(), m.Path) - return nil, true, err - - case TaskPull: - err := Pull(context.Background(), m.Path) - return nil, true, err - - case TaskPushMultiple: - results := PushMultiple(context.Background(), m.Paths, m.Names) - return results, true, nil - } - return nil, false, nil -} - -// Status returns last status result. -func (s *Service) Status() []RepoStatus { return s.lastStatus } - -// DirtyRepos returns repos with uncommitted changes. -func (s *Service) DirtyRepos() []RepoStatus { - var dirty []RepoStatus - for _, st := range s.lastStatus { - if st.Error == nil && st.IsDirty() { - dirty = append(dirty, st) - } - } - return dirty -} - -// AheadRepos returns repos with unpushed commits. -func (s *Service) AheadRepos() []RepoStatus { - var ahead []RepoStatus - for _, st := range s.lastStatus { - if st.Error == nil && st.HasUnpushed() { - ahead = append(ahead, st) - } - } - return ahead -} diff --git a/pkg/go/cmd_commands.go b/pkg/go/cmd_commands.go deleted file mode 100644 index 0265557..0000000 --- a/pkg/go/cmd_commands.go +++ /dev/null @@ -1,21 +0,0 @@ -// Package gocmd provides Go development commands with enhanced output. -// -// Note: Package named gocmd because 'go' is a reserved keyword. 
-// -// Commands: -// - test: Run tests with colour-coded coverage summary -// - cov: Run tests with detailed coverage reports (HTML, thresholds) -// - fmt: Format code using goimports or gofmt -// - lint: Run golangci-lint -// - install: Install binary to $GOPATH/bin -// - mod: Module management (tidy, download, verify, graph) -// - work: Workspace management (sync, init, use) -// -// Sets MACOSX_DEPLOYMENT_TARGET to suppress linker warnings on macOS. -package gocmd - -import "github.com/host-uk/core/pkg/cli" - -func init() { - cli.RegisterCommands(AddGoCommands) -} diff --git a/pkg/go/cmd_format.go b/pkg/go/cmd_format.go deleted file mode 100644 index 59ce1c3..0000000 --- a/pkg/go/cmd_format.go +++ /dev/null @@ -1,79 +0,0 @@ -package gocmd - -import ( - "os" - "os/exec" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -var ( - fmtFix bool - fmtDiff bool - fmtCheck bool -) - -func addGoFmtCommand(parent *cli.Command) { - fmtCmd := &cli.Command{ - Use: "fmt", - Short: "Format Go code", - Long: "Format Go code using goimports or gofmt", - RunE: func(cmd *cli.Command, args []string) error { - fmtArgs := []string{} - if fmtFix { - fmtArgs = append(fmtArgs, "-w") - } - if fmtDiff { - fmtArgs = append(fmtArgs, "-d") - } - if !fmtFix && !fmtDiff { - fmtArgs = append(fmtArgs, "-l") - } - fmtArgs = append(fmtArgs, ".") - - // Try goimports first, fall back to gofmt - var execCmd *exec.Cmd - if _, err := exec.LookPath("goimports"); err == nil { - execCmd = exec.Command("goimports", fmtArgs...) - } else { - execCmd = exec.Command("gofmt", fmtArgs...) 
- } - - execCmd.Stdout = os.Stdout - execCmd.Stderr = os.Stderr - return execCmd.Run() - }, - } - - fmtCmd.Flags().BoolVar(&fmtFix, "fix", false, i18n.T("common.flag.fix")) - fmtCmd.Flags().BoolVar(&fmtDiff, "diff", false, "Show diff of changes") - fmtCmd.Flags().BoolVar(&fmtCheck, "check", false, "Check if formatted (exit 1 if not)") - - parent.AddCommand(fmtCmd) -} - -var lintFix bool - -func addGoLintCommand(parent *cli.Command) { - lintCmd := &cli.Command{ - Use: "lint", - Short: "Run golangci-lint", - Long: "Run golangci-lint for comprehensive static analysis", - RunE: func(cmd *cli.Command, args []string) error { - lintArgs := []string{"run"} - if lintFix { - lintArgs = append(lintArgs, "--fix") - } - - execCmd := exec.Command("golangci-lint", lintArgs...) - execCmd.Stdout = os.Stdout - execCmd.Stderr = os.Stderr - return execCmd.Run() - }, - } - - lintCmd.Flags().BoolVar(&lintFix, "fix", false, i18n.T("common.flag.fix")) - - parent.AddCommand(lintCmd) -} diff --git a/pkg/go/cmd_go.go b/pkg/go/cmd_go.go deleted file mode 100644 index 7aebd9f..0000000 --- a/pkg/go/cmd_go.go +++ /dev/null @@ -1,35 +0,0 @@ -// Package gocmd provides Go development commands. -// -// Note: Package named gocmd because 'go' is a reserved keyword. -package gocmd - -import ( - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -// Style aliases for shared styles -var ( - successStyle = cli.SuccessStyle - errorStyle = cli.ErrorStyle - dimStyle = cli.DimStyle -) - -// AddGoCommands adds Go development commands. 
-func AddGoCommands(root *cli.Command) { - goCmd := &cli.Command{ - Use: "go", - Short: i18n.T("cmd.go.short"), - Long: i18n.T("cmd.go.long"), - } - - root.AddCommand(goCmd) - addGoQACommand(goCmd) - addGoTestCommand(goCmd) - addGoCovCommand(goCmd) - addGoFmtCommand(goCmd) - addGoLintCommand(goCmd) - addGoInstallCommand(goCmd) - addGoModCommand(goCmd) - addGoWorkCommand(goCmd) -} diff --git a/pkg/go/cmd_gotest.go b/pkg/go/cmd_gotest.go deleted file mode 100644 index c34364f..0000000 --- a/pkg/go/cmd_gotest.go +++ /dev/null @@ -1,331 +0,0 @@ -package gocmd - -import ( - "errors" - "fmt" - "os" - "os/exec" - "path/filepath" - "regexp" - "strings" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -var ( - testCoverage bool - testPkg string - testRun string - testShort bool - testRace bool - testJSON bool - testVerbose bool -) - -func addGoTestCommand(parent *cli.Command) { - testCmd := &cli.Command{ - Use: "test", - Short: "Run Go tests", - Long: "Run Go tests with optional coverage, filtering, and race detection", - RunE: func(cmd *cli.Command, args []string) error { - return runGoTest(testCoverage, testPkg, testRun, testShort, testRace, testJSON, testVerbose) - }, - } - - testCmd.Flags().BoolVar(&testCoverage, "coverage", false, "Generate coverage report") - testCmd.Flags().StringVar(&testPkg, "pkg", "", "Package to test") - testCmd.Flags().StringVar(&testRun, "run", "", "Run only tests matching pattern") - testCmd.Flags().BoolVar(&testShort, "short", false, "Run only short tests") - testCmd.Flags().BoolVar(&testRace, "race", false, "Enable race detector") - testCmd.Flags().BoolVar(&testJSON, "json", false, "Output as JSON") - testCmd.Flags().BoolVarP(&testVerbose, "verbose", "v", false, "Verbose output") - - parent.AddCommand(testCmd) -} - -func runGoTest(coverage bool, pkg, run string, short, race, jsonOut, verbose bool) error { - if pkg == "" { - pkg = "./..." 
- } - - args := []string{"test"} - - if coverage { - args = append(args, "-cover") - } else { - args = append(args, "-cover") - } - - if run != "" { - args = append(args, "-run", run) - } - if short { - args = append(args, "-short") - } - if race { - args = append(args, "-race") - } - if verbose { - args = append(args, "-v") - } - - args = append(args, pkg) - - if !jsonOut { - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("test")), i18n.ProgressSubject("run", "tests")) - cli.Print(" %s %s\n", dimStyle.Render(i18n.Label("package")), pkg) - cli.Blank() - } - - cmd := exec.Command("go", args...) - cmd.Env = append(os.Environ(), "MACOSX_DEPLOYMENT_TARGET=26.0", "CGO_ENABLED=0") - cmd.Dir, _ = os.Getwd() - - output, err := cmd.CombinedOutput() - outputStr := string(output) - - // Filter linker warnings - lines := strings.Split(outputStr, "\n") - var filtered []string - for _, line := range lines { - if !strings.Contains(line, "ld: warning:") { - filtered = append(filtered, line) - } - } - outputStr = strings.Join(filtered, "\n") - - // Parse results - passed, failed, skipped := parseTestResults(outputStr) - cov := parseOverallCoverage(outputStr) - - if jsonOut { - cli.Print(`{"passed":%d,"failed":%d,"skipped":%d,"coverage":%.1f,"exit_code":%d}`, - passed, failed, skipped, cov, cmd.ProcessState.ExitCode()) - cli.Blank() - return err - } - - // Print filtered output if verbose or failed - if verbose || err != nil { - cli.Text(outputStr) - } - - // Summary - if err == nil { - cli.Print(" %s %s\n", successStyle.Render(cli.Glyph(":check:")), i18n.T("i18n.count.test", passed)+" "+i18n.T("i18n.done.pass")) - } else { - cli.Print(" %s %s, %s\n", errorStyle.Render(cli.Glyph(":cross:")), - i18n.T("i18n.count.test", passed)+" "+i18n.T("i18n.done.pass"), - i18n.T("i18n.count.test", failed)+" "+i18n.T("i18n.done.fail")) - } - - if cov > 0 { - cli.Print("\n %s %s\n", cli.KeyStyle.Render(i18n.Label("coverage")), formatCoverage(cov)) - } - - if err == nil { - cli.Print("\n%s\n", 
successStyle.Render(i18n.T("i18n.done.pass"))) - } else { - cli.Print("\n%s\n", errorStyle.Render(i18n.T("i18n.done.fail"))) - } - - return err -} - -func parseTestResults(output string) (passed, failed, skipped int) { - passRe := regexp.MustCompile(`(?m)^ok\s+`) - failRe := regexp.MustCompile(`(?m)^FAIL\s+`) - skipRe := regexp.MustCompile(`(?m)^\?\s+`) - - passed = len(passRe.FindAllString(output, -1)) - failed = len(failRe.FindAllString(output, -1)) - skipped = len(skipRe.FindAllString(output, -1)) - return -} - -func parseOverallCoverage(output string) float64 { - re := regexp.MustCompile(`coverage:\s+([\d.]+)%`) - matches := re.FindAllStringSubmatch(output, -1) - if len(matches) == 0 { - return 0 - } - - var total float64 - for _, m := range matches { - var cov float64 - fmt.Sscanf(m[1], "%f", &cov) - total += cov - } - return total / float64(len(matches)) -} - -var ( - covPkg string - covHTML bool - covOpen bool - covThreshold float64 -) - -func addGoCovCommand(parent *cli.Command) { - covCmd := &cli.Command{ - Use: "cov", - Short: "Run tests with coverage report", - Long: "Run tests with detailed coverage reports, HTML output, and threshold checking", - RunE: func(cmd *cli.Command, args []string) error { - pkg := covPkg - if pkg == "" { - // Auto-discover packages with tests - pkgs, err := findTestPackages(".") - if err != nil { - return cli.Wrap(err, i18n.T("i18n.fail.find", "test packages")) - } - if len(pkgs) == 0 { - return errors.New("no test packages found") - } - pkg = strings.Join(pkgs, " ") - } - - // Create temp file for coverage data - covFile, err := os.CreateTemp("", "coverage-*.out") - if err != nil { - return cli.Wrap(err, i18n.T("i18n.fail.create", "coverage file")) - } - covPath := covFile.Name() - covFile.Close() - defer os.Remove(covPath) - - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("coverage")), i18n.ProgressSubject("run", "tests")) - // Truncate package list if too long for display - displayPkg := pkg - if len(displayPkg) > 60 { - 
displayPkg = displayPkg[:57] + "..." - } - cli.Print(" %s %s\n", dimStyle.Render(i18n.Label("package")), displayPkg) - cli.Blank() - - // Run tests with coverage - // We need to split pkg into individual arguments if it contains spaces - pkgArgs := strings.Fields(pkg) - cmdArgs := append([]string{"test", "-coverprofile=" + covPath, "-covermode=atomic"}, pkgArgs...) - - goCmd := exec.Command("go", cmdArgs...) - goCmd.Env = append(os.Environ(), "MACOSX_DEPLOYMENT_TARGET=26.0") - goCmd.Stdout = os.Stdout - goCmd.Stderr = os.Stderr - - testErr := goCmd.Run() - - // Get coverage percentage - coverCmd := exec.Command("go", "tool", "cover", "-func="+covPath) - covOutput, err := coverCmd.Output() - if err != nil { - if testErr != nil { - return testErr - } - return cli.Wrap(err, i18n.T("i18n.fail.get", "coverage")) - } - - // Parse total coverage from last line - lines := strings.Split(strings.TrimSpace(string(covOutput)), "\n") - var totalCov float64 - if len(lines) > 0 { - lastLine := lines[len(lines)-1] - // Format: "total: (statements) XX.X%" - if strings.Contains(lastLine, "total:") { - parts := strings.Fields(lastLine) - if len(parts) >= 3 { - covStr := strings.TrimSuffix(parts[len(parts)-1], "%") - fmt.Sscanf(covStr, "%f", &totalCov) - } - } - } - - // Print coverage summary - cli.Blank() - cli.Print(" %s %s\n", cli.KeyStyle.Render(i18n.Label("total")), formatCoverage(totalCov)) - - // Generate HTML if requested - if covHTML || covOpen { - htmlPath := "coverage.html" - htmlCmd := exec.Command("go", "tool", "cover", "-html="+covPath, "-o="+htmlPath) - if err := htmlCmd.Run(); err != nil { - return cli.Wrap(err, i18n.T("i18n.fail.generate", "HTML")) - } - cli.Print(" %s %s\n", dimStyle.Render(i18n.Label("html")), htmlPath) - - if covOpen { - // Open in browser - var openCmd *exec.Cmd - switch { - case exec.Command("which", "open").Run() == nil: - openCmd = exec.Command("open", htmlPath) - case exec.Command("which", "xdg-open").Run() == nil: - openCmd = 
exec.Command("xdg-open", htmlPath) - default: - cli.Print(" %s\n", dimStyle.Render("Open coverage.html in your browser")) - } - if openCmd != nil { - openCmd.Run() - } - } - } - - // Check threshold - if covThreshold > 0 && totalCov < covThreshold { - cli.Print("\n%s %.1f%% < %.1f%%\n", errorStyle.Render(i18n.T("i18n.fail.meet", "threshold")), totalCov, covThreshold) - return errors.New("coverage below threshold") - } - - if testErr != nil { - return testErr - } - - cli.Print("\n%s\n", successStyle.Render(i18n.T("i18n.done.pass"))) - return nil - }, - } - - covCmd.Flags().StringVar(&covPkg, "pkg", "", "Package to test") - covCmd.Flags().BoolVar(&covHTML, "html", false, "Generate HTML report") - covCmd.Flags().BoolVar(&covOpen, "open", false, "Open HTML report in browser") - covCmd.Flags().Float64Var(&covThreshold, "threshold", 0, "Minimum coverage percentage") - - parent.AddCommand(covCmd) -} - -func findTestPackages(root string) ([]string, error) { - pkgMap := make(map[string]bool) - err := filepath.Walk(root, func(path string, info os.FileInfo, err error) error { - if err != nil { - return nil - } - if !info.IsDir() && strings.HasSuffix(info.Name(), "_test.go") { - dir := filepath.Dir(path) - if !strings.HasPrefix(dir, ".") { - dir = "./" + dir - } - pkgMap[dir] = true - } - return nil - }) - if err != nil { - return nil, err - } - - var pkgs []string - for pkg := range pkgMap { - pkgs = append(pkgs, pkg) - } - return pkgs, nil -} - -func formatCoverage(cov float64) string { - s := fmt.Sprintf("%.1f%%", cov) - if cov >= 80 { - return cli.SuccessStyle.Render(s) - } else if cov >= 50 { - return cli.WarningStyle.Render(s) - } - return cli.ErrorStyle.Render(s) -} diff --git a/pkg/go/cmd_qa.go b/pkg/go/cmd_qa.go deleted file mode 100644 index b3e4424..0000000 --- a/pkg/go/cmd_qa.go +++ /dev/null @@ -1,241 +0,0 @@ -package gocmd - -import ( - "context" - "os" - "os/exec" - "time" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -var qaFix 
bool - -func addGoQACommand(parent *cli.Command) { - qaCmd := &cli.Command{ - Use: "qa", - Short: "Run QA checks", - Long: "Run code quality checks: formatting, vetting, linting, and testing", - RunE: runGoQADefault, - } - - qaCmd.PersistentFlags().BoolVar(&qaFix, "fix", false, i18n.T("common.flag.fix")) - - // Subcommands for individual checks - qaCmd.AddCommand(&cli.Command{ - Use: "fmt", - Short: "Check/fix code formatting", - RunE: func(cmd *cli.Command, args []string) error { return runQAChecks([]string{"fmt"}) }, - }) - - qaCmd.AddCommand(&cli.Command{ - Use: "vet", - Short: "Run go vet", - RunE: func(cmd *cli.Command, args []string) error { return runQAChecks([]string{"vet"}) }, - }) - - qaCmd.AddCommand(&cli.Command{ - Use: "lint", - Short: "Run golangci-lint", - RunE: func(cmd *cli.Command, args []string) error { return runQAChecks([]string{"lint"}) }, - }) - - qaCmd.AddCommand(&cli.Command{ - Use: "test", - Short: "Run tests", - RunE: func(cmd *cli.Command, args []string) error { return runQAChecks([]string{"test"}) }, - }) - - qaCmd.AddCommand(&cli.Command{ - Use: "race", - Short: "Run tests with race detector", - RunE: func(cmd *cli.Command, args []string) error { return runQAChecks([]string{"race"}) }, - }) - - qaCmd.AddCommand(&cli.Command{ - Use: "vuln", - Short: "Check for vulnerabilities", - RunE: func(cmd *cli.Command, args []string) error { return runQAChecks([]string{"vuln"}) }, - }) - - qaCmd.AddCommand(&cli.Command{ - Use: "sec", - Short: "Run security scanner", - RunE: func(cmd *cli.Command, args []string) error { return runQAChecks([]string{"sec"}) }, - }) - - qaCmd.AddCommand(&cli.Command{ - Use: "quick", - Short: "Quick QA: fmt, vet, lint", - RunE: func(cmd *cli.Command, args []string) error { return runQAChecks([]string{"fmt", "vet", "lint"}) }, - }) - - qaCmd.AddCommand(&cli.Command{ - Use: "full", - Short: "Full QA: all checks including race, vuln, sec", - RunE: func(cmd *cli.Command, args []string) error { - return 
runQAChecks([]string{"fmt", "vet", "lint", "test", "race", "vuln", "sec"}) - }, - }) - - parent.AddCommand(qaCmd) -} - -// runGoQADefault runs the default QA checks (fmt, vet, lint, test) -func runGoQADefault(cmd *cli.Command, args []string) error { - return runQAChecks([]string{"fmt", "vet", "lint", "test"}) -} - -// QACheck represents a single QA check. -type QACheck struct { - Name string - Command string - Args []string -} - -func runQAChecks(checkNames []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Wrap(err, i18n.T("i18n.fail.get", "working directory")) - } - - // Detect if this is a Go project - if _, err := os.Stat("go.mod"); os.IsNotExist(err) { - return cli.Err("not a Go project (no %s found)", i18n.T("gram.word.go_mod")) - } - - cli.Print("%s %s\n\n", cli.DimStyle.Render(i18n.Label("qa")), i18n.ProgressSubject("run", "Go QA")) - - checks := buildChecksForNames(checkNames) - - ctx := context.Background() - startTime := time.Now() - passed := 0 - failed := 0 - - for _, check := range checks { - cli.Print("%s %s\n", cli.DimStyle.Render("→"), i18n.Progress(check.Name)) - - if err := runCheck(ctx, cwd, check); err != nil { - cli.Print(" %s %s\n", cli.ErrorStyle.Render(cli.Glyph(":cross:")), err.Error()) - failed++ - } else { - cli.Print(" %s %s\n", cli.SuccessStyle.Render(cli.Glyph(":check:")), i18n.T("i18n.done.pass")) - passed++ - } - } - - // Summary - cli.Blank() - duration := time.Since(startTime).Round(time.Millisecond) - - if failed > 0 { - cli.Print("%s %s, %s (%s)\n", - cli.ErrorStyle.Render(cli.Glyph(":cross:")), - i18n.T("i18n.count.check", passed)+" "+i18n.T("i18n.done.pass"), - i18n.T("i18n.count.check", failed)+" "+i18n.T("i18n.done.fail"), - duration) - os.Exit(1) - } - - cli.Print("%s %s (%s)\n", - cli.SuccessStyle.Render(cli.Glyph(":check:")), - i18n.T("i18n.count.check", passed)+" "+i18n.T("i18n.done.pass"), - duration) - - return nil -} - -func buildChecksForNames(names []string) []QACheck { - allChecks := 
map[string]QACheck{ - "fmt": { - Name: "format", - Command: "gofmt", - Args: fmtArgs(qaFix), - }, - "vet": { - Name: "vet", - Command: "go", - Args: []string{"vet", "./..."}, - }, - "lint": { - Name: "lint", - Command: "golangci-lint", - Args: lintArgs(qaFix), - }, - "test": { - Name: "test", - Command: "go", - Args: []string{"test", "./..."}, - }, - "race": { - Name: "test", - Command: "go", - Args: []string{"test", "-race", "./..."}, - }, - "vuln": { - Name: "scan", - Command: "govulncheck", - Args: []string{"./..."}, - }, - "sec": { - Name: "scan", - Command: "gosec", - Args: []string{"-quiet", "./..."}, - }, - } - - var checks []QACheck - for _, name := range names { - if check, ok := allChecks[name]; ok { - checks = append(checks, check) - } - } - return checks -} - -func fmtArgs(fix bool) []string { - if fix { - return []string{"-w", "."} - } - return []string{"-l", "."} -} - -func lintArgs(fix bool) []string { - args := []string{"run"} - if fix { - args = append(args, "--fix") - } - args = append(args, "./...") - return args -} - -func runCheck(ctx context.Context, dir string, check QACheck) error { - // Check if command exists - if _, err := exec.LookPath(check.Command); err != nil { - return cli.Err("%s: %s", check.Command, i18n.T("i18n.done.miss")) - } - - cmd := exec.CommandContext(ctx, check.Command, check.Args...) 
- cmd.Dir = dir - - // For gofmt -l, capture output to check if files need formatting - if check.Name == "format" && len(check.Args) > 0 && check.Args[0] == "-l" { - output, err := cmd.Output() - if err != nil { - return err - } - if len(output) > 0 { - // Show files that need formatting - cli.Text(string(output)) - return cli.Err("%s (use --fix)", i18n.T("i18n.fail.format", i18n.T("i18n.count.file", len(output)))) - } - return nil - } - - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - return cmd.Run() -} diff --git a/pkg/go/cmd_tools.go b/pkg/go/cmd_tools.go deleted file mode 100644 index fd080ff..0000000 --- a/pkg/go/cmd_tools.go +++ /dev/null @@ -1,236 +0,0 @@ -package gocmd - -import ( - "errors" - "os" - "os/exec" - "path/filepath" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -var ( - installVerbose bool - installNoCgo bool -) - -func addGoInstallCommand(parent *cli.Command) { - installCmd := &cli.Command{ - Use: "install [path]", - Short: "Install Go binary", - Long: "Install Go binary to $GOPATH/bin", - RunE: func(cmd *cli.Command, args []string) error { - // Get install path from args or default to current dir - installPath := "./..." - if len(args) > 0 { - installPath = args[0] - } - - // Detect if we're in a module with cmd/ subdirectories or a root main.go - if installPath == "./..." { - if _, err := os.Stat("core.go"); err == nil { - installPath = "." - } else if entries, err := os.ReadDir("cmd"); err == nil && len(entries) > 0 { - installPath = "./cmd/..." - } else if _, err := os.Stat("main.go"); err == nil { - installPath = "." 
- } - } - - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("install")), i18n.Progress("install")) - cli.Print(" %s %s\n", dimStyle.Render(i18n.Label("path")), installPath) - if installNoCgo { - cli.Print(" %s %s\n", dimStyle.Render(i18n.Label("cgo")), "disabled") - } - - cmdArgs := []string{"install"} - if installVerbose { - cmdArgs = append(cmdArgs, "-v") - } - cmdArgs = append(cmdArgs, installPath) - - execCmd := exec.Command("go", cmdArgs...) - if installNoCgo { - execCmd.Env = append(os.Environ(), "CGO_ENABLED=0") - } - execCmd.Stdout = os.Stdout - execCmd.Stderr = os.Stderr - - if err := execCmd.Run(); err != nil { - cli.Print("\n%s\n", errorStyle.Render(i18n.T("i18n.fail.install", "binary"))) - return err - } - - // Show where it was installed - gopath := os.Getenv("GOPATH") - if gopath == "" { - home, _ := os.UserHomeDir() - gopath = filepath.Join(home, "go") - } - binDir := filepath.Join(gopath, "bin") - - cli.Print("\n%s %s\n", successStyle.Render(i18n.T("i18n.done.install")), binDir) - return nil - }, - } - - installCmd.Flags().BoolVarP(&installVerbose, "verbose", "v", false, "Verbose output") - installCmd.Flags().BoolVar(&installNoCgo, "no-cgo", false, "Disable CGO") - - parent.AddCommand(installCmd) -} - -func addGoModCommand(parent *cli.Command) { - modCmd := &cli.Command{ - Use: "mod", - Short: "Module management", - Long: "Go module management commands", - } - - // tidy - tidyCmd := &cli.Command{ - Use: "tidy", - Short: "Run go mod tidy", - RunE: func(cmd *cli.Command, args []string) error { - execCmd := exec.Command("go", "mod", "tidy") - execCmd.Stdout = os.Stdout - execCmd.Stderr = os.Stderr - return execCmd.Run() - }, - } - - // download - downloadCmd := &cli.Command{ - Use: "download", - Short: "Download module dependencies", - RunE: func(cmd *cli.Command, args []string) error { - execCmd := exec.Command("go", "mod", "download") - execCmd.Stdout = os.Stdout - execCmd.Stderr = os.Stderr - return execCmd.Run() - }, - } - - // verify - verifyCmd 
:= &cli.Command{ - Use: "verify", - Short: "Verify module checksums", - RunE: func(cmd *cli.Command, args []string) error { - execCmd := exec.Command("go", "mod", "verify") - execCmd.Stdout = os.Stdout - execCmd.Stderr = os.Stderr - return execCmd.Run() - }, - } - - // graph - graphCmd := &cli.Command{ - Use: "graph", - Short: "Print module dependency graph", - RunE: func(cmd *cli.Command, args []string) error { - execCmd := exec.Command("go", "mod", "graph") - execCmd.Stdout = os.Stdout - execCmd.Stderr = os.Stderr - return execCmd.Run() - }, - } - - modCmd.AddCommand(tidyCmd) - modCmd.AddCommand(downloadCmd) - modCmd.AddCommand(verifyCmd) - modCmd.AddCommand(graphCmd) - parent.AddCommand(modCmd) -} - -func addGoWorkCommand(parent *cli.Command) { - workCmd := &cli.Command{ - Use: "work", - Short: "Workspace management", - Long: "Go workspace management commands", - } - - // sync - syncCmd := &cli.Command{ - Use: "sync", - Short: "Sync workspace modules", - RunE: func(cmd *cli.Command, args []string) error { - execCmd := exec.Command("go", "work", "sync") - execCmd.Stdout = os.Stdout - execCmd.Stderr = os.Stderr - return execCmd.Run() - }, - } - - // init - initCmd := &cli.Command{ - Use: "init", - Short: "Initialise a new workspace", - RunE: func(cmd *cli.Command, args []string) error { - execCmd := exec.Command("go", "work", "init") - execCmd.Stdout = os.Stdout - execCmd.Stderr = os.Stderr - if err := execCmd.Run(); err != nil { - return err - } - // Auto-add current module if go.mod exists - if _, err := os.Stat("go.mod"); err == nil { - execCmd = exec.Command("go", "work", "use", ".") - execCmd.Stdout = os.Stdout - execCmd.Stderr = os.Stderr - return execCmd.Run() - } - return nil - }, - } - - // use - useCmd := &cli.Command{ - Use: "use [modules...]", - Short: "Add modules to workspace", - RunE: func(cmd *cli.Command, args []string) error { - if len(args) == 0 { - // Auto-detect modules - modules := findGoModules(".") - if len(modules) == 0 { - return 
errors.New("no Go modules found") - } - for _, mod := range modules { - execCmd := exec.Command("go", "work", "use", mod) - execCmd.Stdout = os.Stdout - execCmd.Stderr = os.Stderr - if err := execCmd.Run(); err != nil { - return err - } - cli.Print("%s %s\n", successStyle.Render(i18n.T("i18n.done.add")), mod) - } - return nil - } - - cmdArgs := append([]string{"work", "use"}, args...) - execCmd := exec.Command("go", cmdArgs...) - execCmd.Stdout = os.Stdout - execCmd.Stderr = os.Stderr - return execCmd.Run() - }, - } - - workCmd.AddCommand(syncCmd) - workCmd.AddCommand(initCmd) - workCmd.AddCommand(useCmd) - parent.AddCommand(workCmd) -} - -func findGoModules(root string) []string { - var modules []string - filepath.Walk(root, func(path string, info os.FileInfo, err error) error { - if err != nil { - return nil - } - if info.Name() == "go.mod" && path != "go.mod" { - modules = append(modules, filepath.Dir(path)) - } - return nil - }) - return modules -} diff --git a/pkg/i18n/compose.go b/pkg/i18n/compose.go deleted file mode 100644 index b72ad17..0000000 --- a/pkg/i18n/compose.go +++ /dev/null @@ -1,184 +0,0 @@ -// Package i18n provides internationalization for the CLI. -package i18n - -import ( - "fmt" -) - -// S creates a new Subject with the given noun and value. -// The noun is used for grammar rules, the value for display. -// -// S("file", "config.yaml") // "config.yaml" -// S("repo", repo) // Uses repo.String() or fmt.Sprint() -// S("file", path).Count(3).In("workspace") -func S(noun string, value any) *Subject { - return &Subject{ - Noun: noun, - Value: value, - count: 1, // Default to singular - } -} - -// Count sets the count for pluralization. -// Used to determine singular/plural forms in templates. -// -// S("file", files).Count(len(files)) -func (s *Subject) Count(n int) *Subject { - if s == nil { - return nil - } - s.count = n - return s -} - -// Gender sets the grammatical gender for languages that require it. 
-// Common values: "masculine", "feminine", "neuter" -// -// S("user", user).Gender("female") -func (s *Subject) Gender(g string) *Subject { - if s == nil { - return nil - } - s.gender = g - return s -} - -// In sets the location context for the subject. -// Used in templates to provide spatial context. -// -// S("file", "config.yaml").In("workspace") -func (s *Subject) In(location string) *Subject { - if s == nil { - return nil - } - s.location = location - return s -} - -// Formal sets the formality level to formal (Sie, vous, usted). -// Use for polite/professional address in languages that distinguish formality. -// -// S("colleague", name).Formal() -func (s *Subject) Formal() *Subject { - if s == nil { - return nil - } - s.formality = FormalityFormal - return s -} - -// Informal sets the formality level to informal (du, tu, tú). -// Use for casual/friendly address in languages that distinguish formality. -// -// S("friend", name).Informal() -func (s *Subject) Informal() *Subject { - if s == nil { - return nil - } - s.formality = FormalityInformal - return s -} - -// Formality sets the formality level explicitly. -// -// S("user", name).Formality(FormalityFormal) -func (s *Subject) Formality(f Formality) *Subject { - if s == nil { - return nil - } - s.formality = f - return s -} - -// String returns the display value of the subject. -func (s *Subject) String() string { - if s == nil { - return "" - } - if stringer, ok := s.Value.(fmt.Stringer); ok { - return stringer.String() - } - return fmt.Sprint(s.Value) -} - -// IsPlural returns true if count != 1. -func (s *Subject) IsPlural() bool { - return s != nil && s.count != 1 -} - -// CountInt returns the count value. -func (s *Subject) CountInt() int { - if s == nil { - return 1 - } - return s.count -} - -// CountString returns the count as a string. -func (s *Subject) CountString() string { - if s == nil { - return "1" - } - return fmt.Sprint(s.count) -} - -// GenderString returns the grammatical gender. 
-func (s *Subject) GenderString() string { - if s == nil { - return "" - } - return s.gender -} - -// LocationString returns the location context. -func (s *Subject) LocationString() string { - if s == nil { - return "" - } - return s.location -} - -// NounString returns the noun type. -func (s *Subject) NounString() string { - if s == nil { - return "" - } - return s.Noun -} - -// FormalityString returns the formality level as a string. -// Returns "neutral" if not explicitly set. -func (s *Subject) FormalityString() string { - if s == nil { - return FormalityNeutral.String() - } - return s.formality.String() -} - -// IsFormal returns true if formal address should be used. -func (s *Subject) IsFormal() bool { - return s != nil && s.formality == FormalityFormal -} - -// IsInformal returns true if informal address should be used. -func (s *Subject) IsInformal() bool { - return s != nil && s.formality == FormalityInformal -} - -// newTemplateData creates templateData from a Subject. -func newTemplateData(s *Subject) templateData { - if s == nil { - return templateData{Count: 1} - } - return templateData{ - Subject: s.String(), - Noun: s.Noun, - Count: s.count, - Gender: s.gender, - Location: s.location, - Formality: s.formality, - IsFormal: s.formality == FormalityFormal, - IsPlural: s.count != 1, - Value: s.Value, - } -} diff --git a/pkg/i18n/compose_data_test.go b/pkg/i18n/compose_data_test.go deleted file mode 100644 index c1433c5..0000000 --- a/pkg/i18n/compose_data_test.go +++ /dev/null @@ -1,679 +0,0 @@ -// Package i18n provides internationalization for the CLI. -package i18n - -import ( - "sync" -) - -// coreIntents defines the built-in semantic intents for common operations. -// These are accessed via the "core.*" namespace in T() and C() calls. 
-// -// Each intent provides templates for all output forms: -// - Question: Initial prompt to the user -// - Confirm: Secondary confirmation (for dangerous actions) -// - Success: Message shown on successful completion -// - Failure: Message shown on failure -// -// Templates use Go text/template syntax with the following data available: -// - .Subject: Display value of the subject -// - .Noun: The noun type (e.g., "file", "repo") -// - .Count: Count for pluralization -// - .Location: Location context -// -// Template functions available: -// - title, lower, upper: Case transformations -// - past, gerund: Verb conjugations -// - plural, pluralForm: Noun pluralization -// - article: Indefinite article selection (a/an) -// - quote: Wrap in double quotes -var coreIntents = map[string]Intent{ - // --- Destructive Actions --- - - "core.delete": { - Meta: IntentMeta{ - Type: "action", - Verb: "delete", - Dangerous: true, - Default: "no", - }, - Question: "Delete {{.Subject}}?", - Confirm: "Really delete {{.Subject}}? This cannot be undone.", - Success: "{{.Subject | title}} deleted", - Failure: "Failed to delete {{.Subject}}", - }, - - "core.remove": { - Meta: IntentMeta{ - Type: "action", - Verb: "remove", - Dangerous: true, - Default: "no", - }, - Question: "Remove {{.Subject}}?", - Confirm: "Really remove {{.Subject}}?", - Success: "{{.Subject | title}} removed", - Failure: "Failed to remove {{.Subject}}", - }, - - "core.discard": { - Meta: IntentMeta{ - Type: "action", - Verb: "discard", - Dangerous: true, - Default: "no", - }, - Question: "Discard {{.Subject}}?", - Confirm: "Really discard {{.Subject}}? All changes will be lost.", - Success: "{{.Subject | title}} discarded", - Failure: "Failed to discard {{.Subject}}", - }, - - "core.reset": { - Meta: IntentMeta{ - Type: "action", - Verb: "reset", - Dangerous: true, - Default: "no", - }, - Question: "Reset {{.Subject}}?", - Confirm: "Really reset {{.Subject}}? 
This cannot be undone.", - Success: "{{.Subject | title}} reset", - Failure: "Failed to reset {{.Subject}}", - }, - - "core.overwrite": { - Meta: IntentMeta{ - Type: "action", - Verb: "overwrite", - Dangerous: true, - Default: "no", - }, - Question: "Overwrite {{.Subject}}?", - Confirm: "Really overwrite {{.Subject}}? Existing content will be lost.", - Success: "{{.Subject | title}} overwritten", - Failure: "Failed to overwrite {{.Subject}}", - }, - - // --- Creation Actions --- - - "core.create": { - Meta: IntentMeta{ - Type: "action", - Verb: "create", - Default: "yes", - }, - Question: "Create {{.Subject}}?", - Confirm: "Create {{.Subject}}?", - Success: "{{.Subject | title}} created", - Failure: "Failed to create {{.Subject}}", - }, - - "core.add": { - Meta: IntentMeta{ - Type: "action", - Verb: "add", - Default: "yes", - }, - Question: "Add {{.Subject}}?", - Confirm: "Add {{.Subject}}?", - Success: "{{.Subject | title}} added", - Failure: "Failed to add {{.Subject}}", - }, - - "core.clone": { - Meta: IntentMeta{ - Type: "action", - Verb: "clone", - Default: "yes", - }, - Question: "Clone {{.Subject}}?", - Confirm: "Clone {{.Subject}}?", - Success: "{{.Subject | title}} cloned", - Failure: "Failed to clone {{.Subject}}", - }, - - "core.copy": { - Meta: IntentMeta{ - Type: "action", - Verb: "copy", - Default: "yes", - }, - Question: "Copy {{.Subject}}?", - Confirm: "Copy {{.Subject}}?", - Success: "{{.Subject | title}} copied", - Failure: "Failed to copy {{.Subject}}", - }, - - // --- Modification Actions --- - - "core.save": { - Meta: IntentMeta{ - Type: "action", - Verb: "save", - Default: "yes", - }, - Question: "Save {{.Subject}}?", - Confirm: "Save {{.Subject}}?", - Success: "{{.Subject | title}} saved", - Failure: "Failed to save {{.Subject}}", - }, - - "core.update": { - Meta: IntentMeta{ - Type: "action", - Verb: "update", - Default: "yes", - }, - Question: "Update {{.Subject}}?", - Confirm: "Update {{.Subject}}?", - Success: "{{.Subject | title}} 
updated", - Failure: "Failed to update {{.Subject}}", - }, - - "core.rename": { - Meta: IntentMeta{ - Type: "action", - Verb: "rename", - Default: "yes", - }, - Question: "Rename {{.Subject}}?", - Confirm: "Rename {{.Subject}}?", - Success: "{{.Subject | title}} renamed", - Failure: "Failed to rename {{.Subject}}", - }, - - "core.move": { - Meta: IntentMeta{ - Type: "action", - Verb: "move", - Default: "yes", - }, - Question: "Move {{.Subject}}?", - Confirm: "Move {{.Subject}}?", - Success: "{{.Subject | title}} moved", - Failure: "Failed to move {{.Subject}}", - }, - - // --- Git Actions --- - - "core.commit": { - Meta: IntentMeta{ - Type: "action", - Verb: "commit", - Default: "yes", - }, - Question: "Commit {{.Subject}}?", - Confirm: "Commit {{.Subject}}?", - Success: "{{.Subject | title}} committed", - Failure: "Failed to commit {{.Subject}}", - }, - - "core.push": { - Meta: IntentMeta{ - Type: "action", - Verb: "push", - Default: "yes", - }, - Question: "Push {{.Subject}}?", - Confirm: "Push {{.Subject}}?", - Success: "{{.Subject | title}} pushed", - Failure: "Failed to push {{.Subject}}", - }, - - "core.pull": { - Meta: IntentMeta{ - Type: "action", - Verb: "pull", - Default: "yes", - }, - Question: "Pull {{.Subject}}?", - Confirm: "Pull {{.Subject}}?", - Success: "{{.Subject | title}} pulled", - Failure: "Failed to pull {{.Subject}}", - }, - - "core.merge": { - Meta: IntentMeta{ - Type: "action", - Verb: "merge", - Dangerous: true, - Default: "no", - }, - Question: "Merge {{.Subject}}?", - Confirm: "Really merge {{.Subject}}?", - Success: "{{.Subject | title}} merged", - Failure: "Failed to merge {{.Subject}}", - }, - - "core.rebase": { - Meta: IntentMeta{ - Type: "action", - Verb: "rebase", - Dangerous: true, - Default: "no", - }, - Question: "Rebase {{.Subject}}?", - Confirm: "Really rebase {{.Subject}}? 
This rewrites history.", - Success: "{{.Subject | title}} rebased", - Failure: "Failed to rebase {{.Subject}}", - }, - - // --- Network Actions --- - - "core.install": { - Meta: IntentMeta{ - Type: "action", - Verb: "install", - Default: "yes", - }, - Question: "Install {{.Subject}}?", - Confirm: "Install {{.Subject}}?", - Success: "{{.Subject | title}} installed", - Failure: "Failed to install {{.Subject}}", - }, - - "core.download": { - Meta: IntentMeta{ - Type: "action", - Verb: "download", - Default: "yes", - }, - Question: "Download {{.Subject}}?", - Confirm: "Download {{.Subject}}?", - Success: "{{.Subject | title}} downloaded", - Failure: "Failed to download {{.Subject}}", - }, - - "core.upload": { - Meta: IntentMeta{ - Type: "action", - Verb: "upload", - Default: "yes", - }, - Question: "Upload {{.Subject}}?", - Confirm: "Upload {{.Subject}}?", - Success: "{{.Subject | title}} uploaded", - Failure: "Failed to upload {{.Subject}}", - }, - - "core.publish": { - Meta: IntentMeta{ - Type: "action", - Verb: "publish", - Dangerous: true, - Default: "no", - }, - Question: "Publish {{.Subject}}?", - Confirm: "Really publish {{.Subject}}? 
This will be publicly visible.", - Success: "{{.Subject | title}} published", - Failure: "Failed to publish {{.Subject}}", - }, - - "core.deploy": { - Meta: IntentMeta{ - Type: "action", - Verb: "deploy", - Dangerous: true, - Default: "no", - }, - Question: "Deploy {{.Subject}}?", - Confirm: "Really deploy {{.Subject}}?", - Success: "{{.Subject | title}} deployed", - Failure: "Failed to deploy {{.Subject}}", - }, - - // --- Process Actions --- - - "core.start": { - Meta: IntentMeta{ - Type: "action", - Verb: "start", - Default: "yes", - }, - Question: "Start {{.Subject}}?", - Confirm: "Start {{.Subject}}?", - Success: "{{.Subject | title}} started", - Failure: "Failed to start {{.Subject}}", - }, - - "core.stop": { - Meta: IntentMeta{ - Type: "action", - Verb: "stop", - Default: "yes", - }, - Question: "Stop {{.Subject}}?", - Confirm: "Stop {{.Subject}}?", - Success: "{{.Subject | title}} stopped", - Failure: "Failed to stop {{.Subject}}", - }, - - "core.restart": { - Meta: IntentMeta{ - Type: "action", - Verb: "restart", - Default: "yes", - }, - Question: "Restart {{.Subject}}?", - Confirm: "Restart {{.Subject}}?", - Success: "{{.Subject | title}} restarted", - Failure: "Failed to restart {{.Subject}}", - }, - - "core.run": { - Meta: IntentMeta{ - Type: "action", - Verb: "run", - Default: "yes", - }, - Question: "Run {{.Subject}}?", - Confirm: "Run {{.Subject}}?", - Success: "{{.Subject | title}} completed", - Failure: "Failed to run {{.Subject}}", - }, - - "core.build": { - Meta: IntentMeta{ - Type: "action", - Verb: "build", - Default: "yes", - }, - Question: "Build {{.Subject}}?", - Confirm: "Build {{.Subject}}?", - Success: "{{.Subject | title}} built", - Failure: "Failed to build {{.Subject}}", - }, - - "core.test": { - Meta: IntentMeta{ - Type: "action", - Verb: "test", - Default: "yes", - }, - Question: "Test {{.Subject}}?", - Confirm: "Test {{.Subject}}?", - Success: "{{.Subject | title}} passed", - Failure: "{{.Subject | title}} failed", - }, - - // --- 
Information Actions --- - - "core.continue": { - Meta: IntentMeta{ - Type: "question", - Verb: "continue", - Default: "yes", - }, - Question: "Continue?", - Confirm: "Continue?", - Success: "Continuing", - Failure: "Aborted", - }, - - "core.proceed": { - Meta: IntentMeta{ - Type: "question", - Verb: "proceed", - Default: "yes", - }, - Question: "Proceed?", - Confirm: "Proceed?", - Success: "Proceeding", - Failure: "Aborted", - }, - - "core.confirm": { - Meta: IntentMeta{ - Type: "question", - Verb: "confirm", - Default: "no", - }, - Question: "Are you sure?", - Confirm: "Are you sure?", - Success: "Confirmed", - Failure: "Cancelled", - }, - - // --- Additional Actions --- - - "core.sync": { - Meta: IntentMeta{ - Type: "action", - Verb: "sync", - Default: "yes", - }, - Question: "Sync {{.Subject}}?", - Confirm: "Sync {{.Subject}}?", - Success: "{{.Subject | title}} synced", - Failure: "Failed to sync {{.Subject}}", - }, - - "core.boot": { - Meta: IntentMeta{ - Type: "action", - Verb: "boot", - Default: "yes", - }, - Question: "Boot {{.Subject}}?", - Confirm: "Boot {{.Subject}}?", - Success: "{{.Subject | title}} booted", - Failure: "Failed to boot {{.Subject}}", - }, - - "core.format": { - Meta: IntentMeta{ - Type: "action", - Verb: "format", - Default: "yes", - }, - Question: "Format {{.Subject}}?", - Confirm: "Format {{.Subject}}?", - Success: "{{.Subject | title}} formatted", - Failure: "Failed to format {{.Subject}}", - }, - - "core.analyse": { - Meta: IntentMeta{ - Type: "action", - Verb: "analyse", - Default: "yes", - }, - Question: "Analyse {{.Subject}}?", - Confirm: "Analyse {{.Subject}}?", - Success: "{{.Subject | title}} analysed", - Failure: "Failed to analyse {{.Subject}}", - }, - - "core.link": { - Meta: IntentMeta{ - Type: "action", - Verb: "link", - Default: "yes", - }, - Question: "Link {{.Subject}}?", - Confirm: "Link {{.Subject}}?", - Success: "{{.Subject | title}} linked", - Failure: "Failed to link {{.Subject}}", - }, - - "core.unlink": { - Meta: 
IntentMeta{ - Type: "action", - Verb: "unlink", - Default: "yes", - }, - Question: "Unlink {{.Subject}}?", - Confirm: "Unlink {{.Subject}}?", - Success: "{{.Subject | title}} unlinked", - Failure: "Failed to unlink {{.Subject}}", - }, - - "core.fetch": { - Meta: IntentMeta{ - Type: "action", - Verb: "fetch", - Default: "yes", - }, - Question: "Fetch {{.Subject}}?", - Confirm: "Fetch {{.Subject}}?", - Success: "{{.Subject | title}} fetched", - Failure: "Failed to fetch {{.Subject}}", - }, - - "core.generate": { - Meta: IntentMeta{ - Type: "action", - Verb: "generate", - Default: "yes", - }, - Question: "Generate {{.Subject}}?", - Confirm: "Generate {{.Subject}}?", - Success: "{{.Subject | title}} generated", - Failure: "Failed to generate {{.Subject}}", - }, - - "core.validate": { - Meta: IntentMeta{ - Type: "action", - Verb: "validate", - Default: "yes", - }, - Question: "Validate {{.Subject}}?", - Confirm: "Validate {{.Subject}}?", - Success: "{{.Subject | title}} valid", - Failure: "{{.Subject | title}} invalid", - }, - - "core.check": { - Meta: IntentMeta{ - Type: "action", - Verb: "check", - Default: "yes", - }, - Question: "Check {{.Subject}}?", - Confirm: "Check {{.Subject}}?", - Success: "{{.Subject | title}} OK", - Failure: "{{.Subject | title}} failed", - }, - - "core.scan": { - Meta: IntentMeta{ - Type: "action", - Verb: "scan", - Default: "yes", - }, - Question: "Scan {{.Subject}}?", - Confirm: "Scan {{.Subject}}?", - Success: "{{.Subject | title}} scanned", - Failure: "Failed to scan {{.Subject}}", - }, -} - -// customIntents holds user-registered intents. -// Separated from coreIntents to allow thread-safe registration. -var ( - customIntents = make(map[string]Intent) - customIntentsMu sync.RWMutex -) - -// getIntent retrieves an intent by its key. -// Checks custom intents first, then falls back to core intents. -// Returns nil if the intent is not found. 
-func getIntent(key string) *Intent { - // Check custom intents first (thread-safe) - customIntentsMu.RLock() - if intent, ok := customIntents[key]; ok { - customIntentsMu.RUnlock() - return &intent - } - customIntentsMu.RUnlock() - - // Fall back to core intents - if intent, ok := coreIntents[key]; ok { - return &intent - } - return nil -} - -// RegisterIntent adds a custom intent at runtime. -// Use this to extend the built-in intents with application-specific ones. -// This function is thread-safe. -// -// i18n.RegisterIntent("myapp.archive", i18n.Intent{ -// Meta: i18n.IntentMeta{Type: "action", Verb: "archive", Default: "yes"}, -// Question: "Archive {{.Subject}}?", -// Success: "{{.Subject | title}} archived", -// Failure: "Failed to archive {{.Subject}}", -// }) -func RegisterIntent(key string, intent Intent) { - customIntentsMu.Lock() - defer customIntentsMu.Unlock() - customIntents[key] = intent -} - -// RegisterIntents adds multiple custom intents at runtime. -// This is more efficient than calling RegisterIntent multiple times. -// This function is thread-safe. -// -// i18n.RegisterIntents(map[string]i18n.Intent{ -// "myapp.archive": { -// Meta: i18n.IntentMeta{Type: "action", Verb: "archive"}, -// Question: "Archive {{.Subject}}?", -// }, -// "myapp.export": { -// Meta: i18n.IntentMeta{Type: "action", Verb: "export"}, -// Question: "Export {{.Subject}}?", -// }, -// }) -func RegisterIntents(intents map[string]Intent) { - customIntentsMu.Lock() - defer customIntentsMu.Unlock() - for k, v := range intents { - customIntents[k] = v - } -} - -// UnregisterIntent removes a custom intent by key. -// This only affects custom intents, not core intents. -// This function is thread-safe. -func UnregisterIntent(key string) { - customIntentsMu.Lock() - defer customIntentsMu.Unlock() - delete(customIntents, key) -} - -// IntentKeys returns all registered intent keys (both core and custom). 
-func IntentKeys() []string { - customIntentsMu.RLock() - defer customIntentsMu.RUnlock() - - keys := make([]string, 0, len(coreIntents)+len(customIntents)) - for key := range coreIntents { - keys = append(keys, key) - } - for key := range customIntents { - // Avoid duplicates if custom overrides core - found := false - for _, k := range keys { - if k == key { - found = true - break - } - } - if !found { - keys = append(keys, key) - } - } - return keys -} - -// HasIntent returns true if an intent with the given key exists. -func HasIntent(key string) bool { - return getIntent(key) != nil -} - -// GetIntent returns the intent for a key, or nil if not found. -// This is the public API for retrieving intents. -func GetIntent(key string) *Intent { - return getIntent(key) -} diff --git a/pkg/i18n/compose_test.go b/pkg/i18n/compose_test.go deleted file mode 100644 index dffda78..0000000 --- a/pkg/i18n/compose_test.go +++ /dev/null @@ -1,800 +0,0 @@ -package i18n - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -// stringerValue is a test helper that implements fmt.Stringer -type stringerValue struct { - value string -} - -func (s stringerValue) String() string { - return s.value -} - -func TestSubject_Good(t *testing.T) { - t.Run("basic creation", func(t *testing.T) { - s := S("file", "config.yaml") - assert.Equal(t, "file", s.Noun) - assert.Equal(t, "config.yaml", s.Value) - assert.Equal(t, 1, s.count) - assert.Equal(t, "", s.gender) - assert.Equal(t, "", s.location) - }) - - t.Run("S with different value types", func(t *testing.T) { - s := S("repo", "core-php") - assert.Equal(t, "repo", s.Noun) - assert.Equal(t, "core-php", s.Value) - }) - - t.Run("with count", func(t *testing.T) { - s := S("file", "*.go").Count(5) - assert.Equal(t, 5, s.CountInt()) - assert.True(t, s.IsPlural()) - }) - - t.Run("with gender", func(t *testing.T) { - s := S("user", "alice").Gender("female") - assert.Equal(t, "female", s.GenderString()) - }) - - t.Run("with location", 
func(t *testing.T) { - s := S("file", "config.yaml").In("workspace") - assert.Equal(t, "workspace", s.LocationString()) - }) - - t.Run("chained methods", func(t *testing.T) { - s := S("repo", "core-php").Count(3).Gender("neuter").In("organisation") - assert.Equal(t, "repo", s.NounString()) - assert.Equal(t, 3, s.CountInt()) - assert.Equal(t, "neuter", s.GenderString()) - assert.Equal(t, "organisation", s.LocationString()) - }) -} - -func TestSubject_String(t *testing.T) { - t.Run("string value", func(t *testing.T) { - s := S("file", "config.yaml") - assert.Equal(t, "config.yaml", s.String()) - }) - - t.Run("stringer interface", func(t *testing.T) { - // Using a struct that implements Stringer via embedded method - s := S("item", stringerValue{"test"}) - assert.Equal(t, "test", s.String()) - }) - - t.Run("nil subject", func(t *testing.T) { - var s *Subject - assert.Equal(t, "", s.String()) - }) - - t.Run("int value", func(t *testing.T) { - s := S("count", 42) - assert.Equal(t, "42", s.String()) - }) -} - -func TestSubject_IsPlural(t *testing.T) { - t.Run("singular (count 1)", func(t *testing.T) { - s := S("file", "test.go") - assert.False(t, s.IsPlural()) - }) - - t.Run("plural (count 0)", func(t *testing.T) { - s := S("file", "*.go").Count(0) - assert.True(t, s.IsPlural()) - }) - - t.Run("plural (count > 1)", func(t *testing.T) { - s := S("file", "*.go").Count(5) - assert.True(t, s.IsPlural()) - }) - - t.Run("nil subject", func(t *testing.T) { - var s *Subject - assert.False(t, s.IsPlural()) - }) -} - -func TestSubject_Getters(t *testing.T) { - t.Run("nil safety", func(t *testing.T) { - var s *Subject - assert.Equal(t, "", s.NounString()) - assert.Equal(t, 1, s.CountInt()) - assert.Equal(t, "1", s.CountString()) - assert.Equal(t, "", s.GenderString()) - assert.Equal(t, "", s.LocationString()) - }) - - t.Run("CountString", func(t *testing.T) { - s := S("file", "test.go").Count(42) - assert.Equal(t, "42", s.CountString()) - }) -} - -func TestIntentMeta(t *testing.T) 
{ - meta := IntentMeta{ - Type: "action", - Verb: "delete", - Dangerous: true, - Default: "no", - Supports: []string{"force", "recursive"}, - } - - assert.Equal(t, "action", meta.Type) - assert.Equal(t, "delete", meta.Verb) - assert.True(t, meta.Dangerous) - assert.Equal(t, "no", meta.Default) - assert.Contains(t, meta.Supports, "force") - assert.Contains(t, meta.Supports, "recursive") -} - -func TestComposed(t *testing.T) { - composed := Composed{ - Question: "Delete config.yaml?", - Confirm: "Really delete config.yaml?", - Success: "Config.yaml deleted", - Failure: "Failed to delete config.yaml", - Meta: IntentMeta{ - Type: "action", - Verb: "delete", - Dangerous: true, - Default: "no", - }, - } - - assert.Equal(t, "Delete config.yaml?", composed.Question) - assert.Equal(t, "Really delete config.yaml?", composed.Confirm) - assert.Equal(t, "Config.yaml deleted", composed.Success) - assert.Equal(t, "Failed to delete config.yaml", composed.Failure) - assert.True(t, composed.Meta.Dangerous) -} - -func TestNewTemplateData(t *testing.T) { - t.Run("from subject", func(t *testing.T) { - s := S("file", "config.yaml").Count(3).Gender("neuter").In("workspace") - data := newTemplateData(s) - - assert.Equal(t, "config.yaml", data.Subject) - assert.Equal(t, "file", data.Noun) - assert.Equal(t, 3, data.Count) - assert.Equal(t, "neuter", data.Gender) - assert.Equal(t, "workspace", data.Location) - assert.Equal(t, "config.yaml", data.Value) - }) - - t.Run("from nil subject", func(t *testing.T) { - data := newTemplateData(nil) - - assert.Equal(t, "", data.Subject) - assert.Equal(t, "", data.Noun) - assert.Equal(t, 1, data.Count) - assert.Equal(t, "", data.Gender) - assert.Equal(t, "", data.Location) - assert.Nil(t, data.Value) - }) - - t.Run("with formality", func(t *testing.T) { - s := S("user", "Hans").Formal() - data := newTemplateData(s) - - assert.Equal(t, FormalityFormal, data.Formality) - assert.True(t, data.IsFormal) - }) - - t.Run("with plural", func(t *testing.T) { - s 
:= S("file", "*.go").Count(5) - data := newTemplateData(s) - - assert.True(t, data.IsPlural) - assert.Equal(t, 5, data.Count) - }) -} - -func TestSubject_Formality(t *testing.T) { - t.Run("default is neutral", func(t *testing.T) { - s := S("user", "name") - assert.Equal(t, "neutral", s.FormalityString()) - assert.False(t, s.IsFormal()) - assert.False(t, s.IsInformal()) - }) - - t.Run("Formal()", func(t *testing.T) { - s := S("user", "name").Formal() - assert.Equal(t, "formal", s.FormalityString()) - assert.True(t, s.IsFormal()) - }) - - t.Run("Informal()", func(t *testing.T) { - s := S("user", "name").Informal() - assert.Equal(t, "informal", s.FormalityString()) - assert.True(t, s.IsInformal()) - }) - - t.Run("Formality() explicit", func(t *testing.T) { - s := S("user", "name").Formality(FormalityFormal) - assert.Equal(t, "formal", s.FormalityString()) - }) - - t.Run("nil safety", func(t *testing.T) { - var s *Subject - assert.Equal(t, "neutral", s.FormalityString()) - assert.False(t, s.IsFormal()) - assert.False(t, s.IsInformal()) - }) -} - -// --- Grammar composition tests using intent data --- - -// composeIntent executes intent templates with a subject for testing. -// This is a test helper that replicates what C() used to do. -func composeIntent(intent Intent, subject *Subject) *Composed { - data := newTemplateData(subject) - return &Composed{ - Question: executeIntentTemplate(intent.Question, data), - Confirm: executeIntentTemplate(intent.Confirm, data), - Success: executeIntentTemplate(intent.Success, data), - Failure: executeIntentTemplate(intent.Failure, data), - Meta: intent.Meta, - } -} - -// TestGrammarComposition_MatchesIntents verifies that the grammar engine -// can compose the same strings as the intent templates. -// This turns the intents definitions into a comprehensive test suite. 
-func TestGrammarComposition_MatchesIntents(t *testing.T) { - // Test subjects for validation - subjects := []struct { - noun string - value string - }{ - {"file", "config.yaml"}, - {"directory", "src"}, - {"repo", "core-php"}, - {"branch", "feature/auth"}, - {"commit", "abc1234"}, - {"changes", "5 files"}, - {"package", "laravel/framework"}, - } - - // Test each core intent's composition - for key, intent := range coreIntents { - t.Run(key, func(t *testing.T) { - for _, subj := range subjects { - subject := S(subj.noun, subj.value) - - // Compose using intent templates directly - composed := composeIntent(intent, subject) - - // Verify Success output matches ActionResult - if intent.Success != "" && intent.Meta.Verb != "" { - // Standard success pattern: "{{.Subject | title}} verbed" - expectedSuccess := ActionResult(intent.Meta.Verb, subj.value) - - // Some intents have non-standard success messages - switch key { - case "core.run": - // "completed" instead of "ran" - expectedSuccess = Title(subj.value) + " completed" - case "core.test": - // "passed" instead of "tested" - expectedSuccess = Title(subj.value) + " passed" - case "core.validate": - // "valid" instead of "validated" - expectedSuccess = Title(subj.value) + " valid" - case "core.check": - // "OK" instead of "checked" - expectedSuccess = Title(subj.value) + " OK" - case "core.continue", "core.proceed": - // No subject in success - continue - case "core.confirm": - // No subject in success - continue - } - - assert.Equal(t, expectedSuccess, composed.Success, - "%s: Success mismatch for subject %s", key, subj.value) - } - - // Verify Failure output matches ActionFailed - if intent.Failure != "" && intent.Meta.Verb != "" { - // Standard failure pattern: "Failed to verb subject" - expectedFailure := ActionFailed(intent.Meta.Verb, subj.value) - - // Some intents have non-standard failure messages - switch key { - case "core.test": - // "failed" instead of "Failed to test" - expectedFailure = 
Title(subj.value) + " failed" - case "core.validate": - // "invalid" instead of "Failed to validate" - expectedFailure = Title(subj.value) + " invalid" - case "core.check": - // "failed" instead of "Failed to check" - expectedFailure = Title(subj.value) + " failed" - case "core.continue", "core.proceed", "core.confirm": - // Non-standard failures - continue - } - - assert.Equal(t, expectedFailure, composed.Failure, - "%s: Failure mismatch for subject %s", key, subj.value) - } - } - }) - } -} - -// TestActionResult_AllIntentVerbs tests that ActionResult handles -// all verbs used in the core intents. -func TestActionResult_AllIntentVerbs(t *testing.T) { - // Extract all unique verbs from intents - verbs := make(map[string]bool) - for _, intent := range coreIntents { - if intent.Meta.Verb != "" { - verbs[intent.Meta.Verb] = true - } - } - - subject := "test item" - - for verb := range verbs { - t.Run(verb, func(t *testing.T) { - result := ActionResult(verb, subject) - - // Should produce non-empty result - assert.NotEmpty(t, result, "ActionResult(%q, %q) should not be empty", verb, subject) - - // Should start with title-cased subject - assert.Contains(t, result, Title(subject), - "ActionResult should contain title-cased subject") - - // Should contain past tense of verb - past := PastTense(verb) - assert.Contains(t, result, past, - "ActionResult(%q) should contain past tense %q", verb, past) - }) - } -} - -// TestActionFailed_AllIntentVerbs tests that ActionFailed handles -// all verbs used in the core intents. 
-func TestActionFailed_AllIntentVerbs(t *testing.T) { - verbs := make(map[string]bool) - for _, intent := range coreIntents { - if intent.Meta.Verb != "" { - verbs[intent.Meta.Verb] = true - } - } - - subject := "test item" - - for verb := range verbs { - t.Run(verb, func(t *testing.T) { - result := ActionFailed(verb, subject) - - // Should produce non-empty result - assert.NotEmpty(t, result, "ActionFailed(%q, %q) should not be empty", verb, subject) - - // Should start with "Failed to" - assert.Contains(t, result, "Failed to", - "ActionFailed should contain 'Failed to'") - - // Should contain the verb - assert.Contains(t, result, verb, - "ActionFailed should contain the verb") - - // Should contain the subject - assert.Contains(t, result, subject, - "ActionFailed should contain the subject") - }) - } -} - -// TestProgress_AllIntentVerbs tests that Progress handles -// all verbs used in the core intents. -func TestProgress_AllIntentVerbs(t *testing.T) { - verbs := make(map[string]bool) - for _, intent := range coreIntents { - if intent.Meta.Verb != "" { - verbs[intent.Meta.Verb] = true - } - } - - for verb := range verbs { - t.Run(verb, func(t *testing.T) { - result := Progress(verb) - - // Should produce non-empty result - assert.NotEmpty(t, result, "Progress(%q) should not be empty", verb) - - // Should end with "..." - assert.Contains(t, result, "...", - "Progress should contain '...'") - - // Should contain gerund form - gerund := Gerund(verb) - assert.Contains(t, result, Title(gerund), - "Progress(%q) should contain gerund %q", verb, gerund) - }) - } -} - -// TestPastTense_AllIntentVerbs ensures PastTense works for all intent verbs. 
-func TestPastTense_AllIntentVerbs(t *testing.T) { - expected := map[string]string{ - // Destructive - "delete": "deleted", - "remove": "removed", - "discard": "discarded", - "reset": "reset", - "overwrite": "overwritten", - - // Creation - "create": "created", - "add": "added", - "clone": "cloned", - "copy": "copied", - - // Modification - "save": "saved", - "update": "updated", - "rename": "renamed", - "move": "moved", - - // Git - "commit": "committed", - "push": "pushed", - "pull": "pulled", - "merge": "merged", - "rebase": "rebased", - - // Network - "install": "installed", - "download": "downloaded", - "upload": "uploaded", - "publish": "published", - "deploy": "deployed", - - // Process - "start": "started", - "stop": "stopped", - "restart": "restarted", - "run": "ran", - "build": "built", - "test": "tested", - - // Info - these are regular verbs ending in consonant, -ed suffix - "continue": "continued", - "proceed": "proceeded", - "confirm": "confirmed", - - // Additional - "sync": "synced", - "boot": "booted", - "format": "formatted", - "analyse": "analysed", - "link": "linked", - "unlink": "unlinked", - "fetch": "fetched", - "generate": "generated", - "validate": "validated", - "check": "checked", - "scan": "scanned", - } - - for verb, want := range expected { - t.Run(verb, func(t *testing.T) { - got := PastTense(verb) - assert.Equal(t, want, got, "PastTense(%q)", verb) - }) - } -} - -// TestGerund_AllIntentVerbs ensures Gerund works for all intent verbs. 
-func TestGerund_AllIntentVerbs(t *testing.T) { - expected := map[string]string{ - // Destructive - "delete": "deleting", - "remove": "removing", - "discard": "discarding", - "reset": "resetting", - "overwrite": "overwriting", - - // Creation - "create": "creating", - "add": "adding", - "clone": "cloning", - "copy": "copying", - - // Modification - "save": "saving", - "update": "updating", - "rename": "renaming", - "move": "moving", - - // Git - "commit": "committing", - "push": "pushing", - "pull": "pulling", - "merge": "merging", - "rebase": "rebasing", - - // Network - "install": "installing", - "download": "downloading", - "upload": "uploading", - "publish": "publishing", - "deploy": "deploying", - - // Process - "start": "starting", - "stop": "stopping", - "restart": "restarting", - "run": "running", - "build": "building", - "test": "testing", - - // Info - "continue": "continuing", - "proceed": "proceeding", - "confirm": "confirming", - - // Additional - "sync": "syncing", - "boot": "booting", - "format": "formatting", - "analyse": "analysing", - "link": "linking", - "unlink": "unlinking", - "fetch": "fetching", - "generate": "generating", - "validate": "validating", - "check": "checking", - "scan": "scanning", - } - - for verb, want := range expected { - t.Run(verb, func(t *testing.T) { - got := Gerund(verb) - assert.Equal(t, want, got, "Gerund(%q)", verb) - }) - } -} - -// TestQuestionFormat verifies that standard question format -// can be composed from verb and subject. -func TestQuestionFormat(t *testing.T) { - tests := []struct { - verb string - subject string - expected string - }{ - {"delete", "config.yaml", "Delete config.yaml?"}, - {"create", "src", "Create src?"}, - {"commit", "changes", "Commit changes?"}, - {"push", "5 commits", "Push 5 commits?"}, - {"install", "package", "Install package?"}, - } - - for _, tt := range tests { - t.Run(tt.verb, func(t *testing.T) { - // Standard question format: "Verb subject?" 
- result := Title(tt.verb) + " " + tt.subject + "?" - assert.Equal(t, tt.expected, result) - }) - } -} - -// TestConfirmFormat verifies dangerous action confirm messages. -func TestConfirmFormat(t *testing.T) { - // Dangerous actions have "Really verb subject?" confirm - dangerous := []string{"delete", "remove", "discard", "reset", "overwrite", "merge", "rebase", "publish", "deploy"} - - for _, verb := range dangerous { - t.Run(verb, func(t *testing.T) { - subject := "test item" - // Basic confirm format - result := "Really " + verb + " " + subject + "?" - - assert.Contains(t, result, "Really", - "Dangerous action confirm should start with 'Really'") - assert.Contains(t, result, verb) - assert.Contains(t, result, subject) - assert.Contains(t, result, "?") - }) - } -} - -// TestIntentConsistency verifies patterns across all intents. -func TestIntentConsistency(t *testing.T) { - // These intents have non-standard question formats - specialQuestions := map[string]bool{ - "core.continue": true, // "Continue?" (no subject) - "core.proceed": true, // "Proceed?" (no subject) - "core.confirm": true, // "Are you sure?" 
(different format) - } - - for key, intent := range coreIntents { - t.Run(key, func(t *testing.T) { - verb := intent.Meta.Verb - - // Verify verb is set - assert.NotEmpty(t, verb, "intent should have a verb") - - // Verify Question contains the verb (unless special case) - if !specialQuestions[key] { - assert.Contains(t, intent.Question, Title(verb)+" ", - "Question should contain '%s '", Title(verb)) - } - - // Verify dangerous intents default to "no" - if intent.Meta.Dangerous { - assert.Equal(t, "no", intent.Meta.Default, - "Dangerous intent should default to 'no'") - } - - // Verify non-dangerous intents default to "yes" - if !intent.Meta.Dangerous && intent.Meta.Type == "action" { - assert.Equal(t, "yes", intent.Meta.Default, - "Safe action intent should default to 'yes'") - } - }) - } -} - -// TestComposedVsManual compares template output with manual grammar composition. -func TestComposedVsManual(t *testing.T) { - tests := []struct { - intentKey string - noun string - value string - }{ - {"core.delete", "file", "config.yaml"}, - {"core.create", "directory", "src"}, - {"core.save", "changes", "data"}, - {"core.commit", "repo", "core-php"}, - {"core.push", "branch", "feature/test"}, - {"core.install", "package", "express"}, - } - - for _, tt := range tests { - t.Run(tt.intentKey, func(t *testing.T) { - subject := S(tt.noun, tt.value) - intent := coreIntents[tt.intentKey] - - // Compose using intent templates - composed := composeIntent(intent, subject) - - // Manual composition using grammar functions - manualSuccess := ActionResult(intent.Meta.Verb, tt.value) - manualFailure := ActionFailed(intent.Meta.Verb, tt.value) - - assert.Equal(t, manualSuccess, composed.Success, - "Template Success should match ActionResult()") - assert.Equal(t, manualFailure, composed.Failure, - "Template Failure should match ActionFailed()") - }) - } -} - -// TestGrammarCanReplaceIntents demonstrates that the grammar engine -// can compose all the standard output forms without 
hardcoded templates. -// This proves the i18n system can work with just verb definitions. -func TestGrammarCanReplaceIntents(t *testing.T) { - tests := []struct { - verb string - subject string - // Expected outputs that grammar should produce - wantQuestion string - wantSuccess string - wantFailure string - wantProgress string - }{ - { - verb: "delete", - subject: "config.yaml", - wantQuestion: "Delete config.yaml?", - wantSuccess: "Config.Yaml deleted", - wantFailure: "Failed to delete config.yaml", - wantProgress: "Deleting...", - }, - { - verb: "create", - subject: "project", - wantQuestion: "Create project?", - wantSuccess: "Project created", - wantFailure: "Failed to create project", - wantProgress: "Creating...", - }, - { - verb: "build", - subject: "app", - wantQuestion: "Build app?", - wantSuccess: "App built", - wantFailure: "Failed to build app", - wantProgress: "Building...", - }, - { - verb: "run", - subject: "tests", - wantQuestion: "Run tests?", - wantSuccess: "Tests ran", - wantFailure: "Failed to run tests", - wantProgress: "Running...", - }, - { - verb: "commit", - subject: "changes", - wantQuestion: "Commit changes?", - wantSuccess: "Changes committed", - wantFailure: "Failed to commit changes", - wantProgress: "Committing...", - }, - { - verb: "overwrite", - subject: "file", - wantQuestion: "Overwrite file?", - wantSuccess: "File overwritten", - wantFailure: "Failed to overwrite file", - wantProgress: "Overwriting...", - }, - { - verb: "reset", - subject: "state", - wantQuestion: "Reset state?", - wantSuccess: "State reset", - wantFailure: "Failed to reset state", - wantProgress: "Resetting...", - }, - } - - for _, tt := range tests { - t.Run(tt.verb, func(t *testing.T) { - // Compose using grammar functions only (no templates) - question := Title(tt.verb) + " " + tt.subject + "?" 
- success := ActionResult(tt.verb, tt.subject) - failure := ActionFailed(tt.verb, tt.subject) - progress := Progress(tt.verb) - - assert.Equal(t, tt.wantQuestion, question, "Question") - assert.Equal(t, tt.wantSuccess, success, "Success") - assert.Equal(t, tt.wantFailure, failure, "Failure") - assert.Equal(t, tt.wantProgress, progress, "Progress") - }) - } -} - -// TestProgressSubjectMatchesExpected tests ProgressSubject for all intent verbs. -func TestProgressSubjectMatchesExpected(t *testing.T) { - tests := []struct { - verb string - subject string - want string - }{ - {"delete", "config.yaml", "Deleting config.yaml..."}, - {"create", "project", "Creating project..."}, - {"build", "app", "Building app..."}, - {"install", "package", "Installing package..."}, - {"commit", "changes", "Committing changes..."}, - {"push", "commits", "Pushing commits..."}, - {"pull", "updates", "Pulling updates..."}, - {"sync", "files", "Syncing files..."}, - {"fetch", "data", "Fetching data..."}, - {"check", "status", "Checking status..."}, - } - - for _, tt := range tests { - t.Run(tt.verb, func(t *testing.T) { - result := ProgressSubject(tt.verb, tt.subject) - assert.Equal(t, tt.want, result) - }) - } -} - diff --git a/pkg/i18n/context.go b/pkg/i18n/context.go deleted file mode 100644 index c20d7f5..0000000 --- a/pkg/i18n/context.go +++ /dev/null @@ -1,106 +0,0 @@ -// Package i18n provides internationalization for the CLI. -package i18n - -// TranslationContext provides disambiguation for translations. -// Use this when the same word translates differently in different contexts. 
-// -// Example: "right" can mean direction or correctness: -// -// T("direction.right", C("navigation")) // → "rechts" (German) -// T("status.right", C("correctness")) // → "richtig" (German) -type TranslationContext struct { - Context string // Semantic context (e.g., "navigation", "correctness") - Gender string // Grammatical gender hint (e.g., "masculine", "feminine") - Formality Formality // Formality level override - Extra map[string]any // Additional context-specific data -} - -// C creates a TranslationContext with the given context string. -// Chain methods to add more context: -// -// C("navigation").Gender("masculine").Formal() -func C(context string) *TranslationContext { - return &TranslationContext{ - Context: context, - } -} - -// WithGender sets the grammatical gender hint. -func (c *TranslationContext) WithGender(gender string) *TranslationContext { - if c == nil { - return nil - } - c.Gender = gender - return c -} - -// Formal sets the formality level to formal. -func (c *TranslationContext) Formal() *TranslationContext { - if c == nil { - return nil - } - c.Formality = FormalityFormal - return c -} - -// Informal sets the formality level to informal. -func (c *TranslationContext) Informal() *TranslationContext { - if c == nil { - return nil - } - c.Formality = FormalityInformal - return c -} - -// WithFormality sets an explicit formality level. -func (c *TranslationContext) WithFormality(f Formality) *TranslationContext { - if c == nil { - return nil - } - c.Formality = f - return c -} - -// Set adds a key-value pair to the extra context data. -func (c *TranslationContext) Set(key string, value any) *TranslationContext { - if c == nil { - return nil - } - if c.Extra == nil { - c.Extra = make(map[string]any) - } - c.Extra[key] = value - return c -} - -// Get retrieves a value from the extra context data. 
-func (c *TranslationContext) Get(key string) any { - if c == nil || c.Extra == nil { - return nil - } - return c.Extra[key] -} - -// ContextString returns the context string (nil-safe). -func (c *TranslationContext) ContextString() string { - if c == nil { - return "" - } - return c.Context -} - -// GenderString returns the gender hint (nil-safe). -func (c *TranslationContext) GenderString() string { - if c == nil { - return "" - } - return c.Gender -} - -// FormalityValue returns the formality level (nil-safe). -func (c *TranslationContext) FormalityValue() Formality { - if c == nil { - return FormalityNeutral - } - return c.Formality -} diff --git a/pkg/i18n/context_test.go b/pkg/i18n/context_test.go deleted file mode 100644 index a81cf84..0000000 --- a/pkg/i18n/context_test.go +++ /dev/null @@ -1,125 +0,0 @@ -package i18n - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestTranslationContext_C(t *testing.T) { - t.Run("creates context", func(t *testing.T) { - ctx := C("navigation") - assert.NotNil(t, ctx) - assert.Equal(t, "navigation", ctx.Context) - }) - - t.Run("empty context", func(t *testing.T) { - ctx := C("") - assert.NotNil(t, ctx) - assert.Empty(t, ctx.Context) - }) -} - -func TestTranslationContext_WithGender(t *testing.T) { - t.Run("sets gender", func(t *testing.T) { - ctx := C("context").WithGender("masculine") - assert.Equal(t, "masculine", ctx.Gender) - }) - - t.Run("nil safety", func(t *testing.T) { - var ctx *TranslationContext - result := ctx.WithGender("masculine") - assert.Nil(t, result) - }) -} - -func TestTranslationContext_Formality(t *testing.T) { - t.Run("Formal", func(t *testing.T) { - ctx := C("context").Formal() - assert.Equal(t, FormalityFormal, ctx.Formality) - }) - - t.Run("Informal", func(t *testing.T) { - ctx := C("context").Informal() - assert.Equal(t, FormalityInformal, ctx.Formality) - }) - - t.Run("WithFormality", func(t *testing.T) { - ctx := C("context").WithFormality(FormalityFormal) - 
assert.Equal(t, FormalityFormal, ctx.Formality) - }) - - t.Run("nil safety", func(t *testing.T) { - var ctx *TranslationContext - assert.Nil(t, ctx.Formal()) - assert.Nil(t, ctx.Informal()) - assert.Nil(t, ctx.WithFormality(FormalityFormal)) - }) -} - -func TestTranslationContext_Extra(t *testing.T) { - t.Run("Set and Get", func(t *testing.T) { - ctx := C("context").Set("key", "value") - assert.Equal(t, "value", ctx.Get("key")) - }) - - t.Run("Get missing key", func(t *testing.T) { - ctx := C("context") - assert.Nil(t, ctx.Get("missing")) - }) - - t.Run("nil safety Set", func(t *testing.T) { - var ctx *TranslationContext - result := ctx.Set("key", "value") - assert.Nil(t, result) - }) - - t.Run("nil safety Get", func(t *testing.T) { - var ctx *TranslationContext - assert.Nil(t, ctx.Get("key")) - }) -} - -func TestTranslationContext_Getters(t *testing.T) { - t.Run("ContextString", func(t *testing.T) { - ctx := C("navigation") - assert.Equal(t, "navigation", ctx.ContextString()) - }) - - t.Run("ContextString nil", func(t *testing.T) { - var ctx *TranslationContext - assert.Empty(t, ctx.ContextString()) - }) - - t.Run("GenderString", func(t *testing.T) { - ctx := C("context").WithGender("feminine") - assert.Equal(t, "feminine", ctx.GenderString()) - }) - - t.Run("GenderString nil", func(t *testing.T) { - var ctx *TranslationContext - assert.Empty(t, ctx.GenderString()) - }) - - t.Run("FormalityValue", func(t *testing.T) { - ctx := C("context").Formal() - assert.Equal(t, FormalityFormal, ctx.FormalityValue()) - }) - - t.Run("FormalityValue nil", func(t *testing.T) { - var ctx *TranslationContext - assert.Equal(t, FormalityNeutral, ctx.FormalityValue()) - }) -} - -func TestTranslationContext_Chaining(t *testing.T) { - ctx := C("navigation"). - WithGender("masculine"). - Formal(). 
- Set("locale", "de-DE") - - assert.Equal(t, "navigation", ctx.Context) - assert.Equal(t, "masculine", ctx.Gender) - assert.Equal(t, FormalityFormal, ctx.Formality) - assert.Equal(t, "de-DE", ctx.Get("locale")) -} diff --git a/pkg/i18n/debug.go b/pkg/i18n/debug.go deleted file mode 100644 index cc52388..0000000 --- a/pkg/i18n/debug.go +++ /dev/null @@ -1,49 +0,0 @@ -// Package i18n provides internationalization for the CLI. -package i18n - -// Debug mode provides visibility into i18n key resolution for development. -// When enabled, translations are prefixed with their key: [cli.success] Success -// -// Usage: -// -// i18n.SetDebug(true) -// fmt.Println(i18n.T("cli.success")) // "[cli.success] Success" -// -// This helps identify which keys are being used in the UI, making it easier -// to find and update translations during development. - -// SetDebug enables or disables debug mode on the default service. -// Does nothing if the service is not initialized. -// In debug mode, translations show their keys: [key] translation -// -// SetDebug(true) -// T("cli.success") // "[cli.success] Success" -func SetDebug(enabled bool) { - if svc := Default(); svc != nil { - svc.SetDebug(enabled) - } -} - -// SetDebug enables or disables debug mode. -// In debug mode, translations are prefixed with their key: -// -// [cli.success] Success -// [core.delete] Delete config.yaml? -func (s *Service) SetDebug(enabled bool) { - s.mu.Lock() - defer s.mu.Unlock() - s.debug = enabled -} - -// Debug returns whether debug mode is enabled. -func (s *Service) Debug() bool { - s.mu.RLock() - defer s.mu.RUnlock() - return s.debug -} - -// debugFormat formats a translation with its key prefix for debug mode. -// Returns "[key] text" format. 
-func debugFormat(key, text string) string { - return "[" + key + "] " + text -} diff --git a/pkg/i18n/grammar.go b/pkg/i18n/grammar.go deleted file mode 100644 index 91859d7..0000000 --- a/pkg/i18n/grammar.go +++ /dev/null @@ -1,532 +0,0 @@ -// Package i18n provides internationalization for the CLI. -package i18n - -import ( - "strings" - "text/template" - "unicode" -) - -// GetGrammarData returns the grammar data for the specified language. -// Returns nil if no grammar data is loaded for the language. -func GetGrammarData(lang string) *GrammarData { - grammarCacheMu.RLock() - defer grammarCacheMu.RUnlock() - return grammarCache[lang] -} - -// SetGrammarData sets the grammar data for a language. -// Called by the Service when loading locale files. -func SetGrammarData(lang string, data *GrammarData) { - grammarCacheMu.Lock() - defer grammarCacheMu.Unlock() - grammarCache[lang] = data -} - -// getVerbForm retrieves a verb form from JSON data. -// Returns empty string if not found, allowing fallback to computed form. -func getVerbForm(lang, verb, form string) string { - data := GetGrammarData(lang) - if data == nil || data.Verbs == nil { - return "" - } - verb = strings.ToLower(verb) - if forms, ok := data.Verbs[verb]; ok { - switch form { - case "past": - return forms.Past - case "gerund": - return forms.Gerund - } - } - return "" -} - -// getWord retrieves a base word translation from JSON data. -// Returns empty string if not found, allowing fallback to the key itself. -func getWord(lang, word string) string { - data := GetGrammarData(lang) - if data == nil || data.Words == nil { - return "" - } - return data.Words[strings.ToLower(word)] -} - -// getPunct retrieves a punctuation rule for the language. -// Returns the default if not found. 
-func getPunct(lang, rule, defaultVal string) string { - data := GetGrammarData(lang) - if data == nil { - return defaultVal - } - switch rule { - case "label": - if data.Punct.LabelSuffix != "" { - return data.Punct.LabelSuffix - } - case "progress": - if data.Punct.ProgressSuffix != "" { - return data.Punct.ProgressSuffix - } - } - return defaultVal -} - -// getNounForm retrieves a noun form from JSON data. -// Returns empty string if not found, allowing fallback to computed form. -func getNounForm(lang, noun, form string) string { - data := GetGrammarData(lang) - if data == nil || data.Nouns == nil { - return "" - } - noun = strings.ToLower(noun) - if forms, ok := data.Nouns[noun]; ok { - switch form { - case "one": - return forms.One - case "other": - return forms.Other - case "gender": - return forms.Gender - } - } - return "" -} - -// currentLangForGrammar returns the current language for grammar lookups. -// Uses the default service's language if available. -func currentLangForGrammar() string { - if svc := Default(); svc != nil { - return svc.Language() - } - return "en-GB" -} - -// PastTense returns the past tense of a verb. -// Checks JSON locale data first, then irregular verbs, then applies regular rules. -// -// PastTense("delete") // "deleted" -// PastTense("run") // "ran" -// PastTense("copy") // "copied" -func PastTense(verb string) string { - verb = strings.ToLower(strings.TrimSpace(verb)) - if verb == "" { - return "" - } - - // Check JSON data first (for current language) - if form := getVerbForm(currentLangForGrammar(), verb, "past"); form != "" { - return form - } - - // Check irregular verbs - if forms, ok := irregularVerbs[verb]; ok { - return forms.Past - } - - return applyRegularPastTense(verb) -} - -// applyRegularPastTense applies regular past tense rules. 
-func applyRegularPastTense(verb string) string { - // Already ends in -ed (but not -eed, -ied which need different handling) - // Words like "proceed", "succeed", "exceed" end in -eed and are NOT past tense - if strings.HasSuffix(verb, "ed") && len(verb) > 2 { - // Check if it's actually a past tense suffix (consonant + ed) - // vs a word root ending (e.g., "proceed" = proc + eed, "feed" = feed) - thirdFromEnd := verb[len(verb)-3] - if !isVowel(rune(thirdFromEnd)) && thirdFromEnd != 'e' { - // Consonant before -ed means it's likely already past tense - return verb - } - // Words ending in vowel + ed (like "proceed") need -ed added - } - - // Ends in -e: just add -d - if strings.HasSuffix(verb, "e") { - return verb + "d" - } - - // Ends in consonant + y: change y to ied - if strings.HasSuffix(verb, "y") && len(verb) > 1 { - prev := rune(verb[len(verb)-2]) - if !isVowel(prev) { - return verb[:len(verb)-1] + "ied" - } - } - - // Ends in single vowel + single consonant (CVC pattern): double consonant - if len(verb) >= 2 && shouldDoubleConsonant(verb) { - return verb + string(verb[len(verb)-1]) + "ed" - } - - // Default: add -ed - return verb + "ed" -} - -// shouldDoubleConsonant checks if the final consonant should be doubled. -// Applies to CVC (consonant-vowel-consonant) endings in single-syllable words -// and stressed final syllables in multi-syllable words. 
-func shouldDoubleConsonant(verb string) bool { - if len(verb) < 3 { - return false - } - - // Check explicit exceptions - if noDoubleConsonant[verb] { - return false - } - - lastChar := rune(verb[len(verb)-1]) - secondLast := rune(verb[len(verb)-2]) - - // Last char must be consonant (not w, x, y) - if isVowel(lastChar) || lastChar == 'w' || lastChar == 'x' || lastChar == 'y' { - return false - } - - // Second to last must be a single vowel - if !isVowel(secondLast) { - return false - } - - // For short words (3-4 chars), always double if CVC pattern - if len(verb) <= 4 { - thirdLast := rune(verb[len(verb)-3]) - return !isVowel(thirdLast) - } - - // For longer words, only double if the pattern is strongly CVC - // (stressed final syllable). This is a simplification - in practice, - // most common multi-syllable verbs either: - // 1. End in a doubled consonant already (e.g., "submit" -> "submitted") - // 2. Don't double (e.g., "open" -> "opened") - // We err on the side of not doubling for longer words - return false -} - -// Gerund returns the present participle (-ing form) of a verb. -// Checks JSON locale data first, then irregular verbs, then applies regular rules. -// -// Gerund("delete") // "deleting" -// Gerund("run") // "running" -// Gerund("die") // "dying" -func Gerund(verb string) string { - verb = strings.ToLower(strings.TrimSpace(verb)) - if verb == "" { - return "" - } - - // Check JSON data first (for current language) - if form := getVerbForm(currentLangForGrammar(), verb, "gerund"); form != "" { - return form - } - - // Check irregular verbs - if forms, ok := irregularVerbs[verb]; ok { - return forms.Gerund - } - - return applyRegularGerund(verb) -} - -// applyRegularGerund applies regular gerund rules. 
-func applyRegularGerund(verb string) string { - // Ends in -ie: change to -ying - if strings.HasSuffix(verb, "ie") { - return verb[:len(verb)-2] + "ying" - } - - // Ends in -e (but not -ee, -ye, -oe): drop e, add -ing - if strings.HasSuffix(verb, "e") && len(verb) > 1 { - secondLast := rune(verb[len(verb)-2]) - if secondLast != 'e' && secondLast != 'y' && secondLast != 'o' { - return verb[:len(verb)-1] + "ing" - } - } - - // CVC pattern: double final consonant - if shouldDoubleConsonant(verb) { - return verb + string(verb[len(verb)-1]) + "ing" - } - - // Default: add -ing - return verb + "ing" -} - -// Pluralize returns the plural form of a noun based on count. -// If count is 1, returns the singular form unchanged. -// -// Pluralize("file", 1) // "file" -// Pluralize("file", 5) // "files" -// Pluralize("child", 3) // "children" -// Pluralize("box", 2) // "boxes" -func Pluralize(noun string, count int) string { - if count == 1 { - return noun - } - return PluralForm(noun) -} - -// PluralForm returns the plural form of a noun. -// Checks JSON locale data first, then irregular nouns, then applies regular rules. 
-// -// PluralForm("file") // "files" -// PluralForm("child") // "children" -// PluralForm("box") // "boxes" -func PluralForm(noun string) string { - noun = strings.TrimSpace(noun) - if noun == "" { - return "" - } - - lower := strings.ToLower(noun) - - // Check JSON data first (for current language) - if form := getNounForm(currentLangForGrammar(), lower, "other"); form != "" { - // Preserve original casing if title case - if unicode.IsUpper(rune(noun[0])) && len(form) > 0 { - return strings.ToUpper(string(form[0])) + form[1:] - } - return form - } - - // Check irregular nouns - if plural, ok := irregularNouns[lower]; ok { - // Preserve original casing if title case - if unicode.IsUpper(rune(noun[0])) { - return strings.ToUpper(string(plural[0])) + plural[1:] - } - return plural - } - - return applyRegularPlural(noun) -} - -// applyRegularPlural applies regular plural rules. -func applyRegularPlural(noun string) string { - lower := strings.ToLower(noun) - - // Words ending in -s, -ss, -sh, -ch, -x, -z: add -es - if strings.HasSuffix(lower, "s") || - strings.HasSuffix(lower, "ss") || - strings.HasSuffix(lower, "sh") || - strings.HasSuffix(lower, "ch") || - strings.HasSuffix(lower, "x") || - strings.HasSuffix(lower, "z") { - return noun + "es" - } - - // Words ending in consonant + y: change y to ies - if strings.HasSuffix(lower, "y") && len(noun) > 1 { - prev := rune(lower[len(lower)-2]) - if !isVowel(prev) { - return noun[:len(noun)-1] + "ies" - } - } - - // Words ending in -f or -fe: change to -ves (some exceptions already in irregulars) - if strings.HasSuffix(lower, "f") { - return noun[:len(noun)-1] + "ves" - } - if strings.HasSuffix(lower, "fe") { - return noun[:len(noun)-2] + "ves" - } - - // Words ending in -o preceded by consonant: add -es - if strings.HasSuffix(lower, "o") && len(noun) > 1 { - prev := rune(lower[len(lower)-2]) - if !isVowel(prev) { - // Many exceptions (photos, pianos) - but common tech terms add -es - if lower == "hero" || lower == 
"potato" || lower == "tomato" || lower == "echo" || lower == "veto" { - return noun + "es" - } - } - } - - // Default: add -s - return noun + "s" -} - -// Article returns the appropriate indefinite article ("a" or "an") for a word. -// -// Article("file") // "a" -// Article("error") // "an" -// Article("user") // "a" (sounds like "yoo-zer") -// Article("hour") // "an" (silent h) -func Article(word string) string { - if word == "" { - return "" - } - - lower := strings.ToLower(strings.TrimSpace(word)) - - // Check for consonant sounds (words starting with vowels but sounding like consonants) - for key := range consonantSounds { - if strings.HasPrefix(lower, key) { - return "a" - } - } - - // Check for vowel sounds (words starting with consonants but sounding like vowels) - for key := range vowelSounds { - if strings.HasPrefix(lower, key) { - return "an" - } - } - - // Check first letter - if len(lower) > 0 && isVowel(rune(lower[0])) { - return "an" - } - - return "a" -} - -// isVowel returns true if the rune is a vowel (a, e, i, o, u). -func isVowel(r rune) bool { - switch unicode.ToLower(r) { - case 'a', 'e', 'i', 'o', 'u': - return true - } - return false -} - -// Title capitalizes the first letter of each word. -// Uses unicode-aware casing for proper internationalization. -// Word boundaries are defined as any non-letter character (matching strings.Title behavior). -func Title(s string) string { - var b strings.Builder - b.Grow(len(s)) - prev := ' ' // Treat start of string as word boundary - for _, r := range s { - if !unicode.IsLetter(prev) && unicode.IsLetter(r) { - b.WriteRune(unicode.ToUpper(r)) - } else { - b.WriteRune(r) - } - prev = r - } - return b.String() -} - -// Quote wraps a string in double quotes. -func Quote(s string) string { - return `"` + s + `"` -} - -// TemplateFuncs returns the template.FuncMap with all grammar functions. -// Use this to add grammar helpers to your templates. 
-// -// tmpl := template.New("").Funcs(i18n.TemplateFuncs()) -func TemplateFuncs() template.FuncMap { - return template.FuncMap{ - "title": Title, - "lower": strings.ToLower, - "upper": strings.ToUpper, - "past": PastTense, - "gerund": Gerund, - "plural": Pluralize, - "pluralForm": PluralForm, - "article": Article, - "quote": Quote, - } -} - -// Progress returns a progress message for a verb. -// Generates "Verbing..." form using language-specific punctuation. -// -// Progress("build") // "Building..." -// Progress("check") // "Checking..." -// Progress("fetch") // "Fetching..." -func Progress(verb string) string { - lang := currentLangForGrammar() - - // Try translated word first - word := getWord(lang, verb) - if word == "" { - word = verb - } - - g := Gerund(word) - if g == "" { - return "" - } - - suffix := getPunct(lang, "progress", "...") - return Title(g) + suffix -} - -// ProgressSubject returns a progress message with a subject. -// Generates "Verbing subject..." form using language-specific punctuation. -// -// ProgressSubject("build", "project") // "Building project..." -// ProgressSubject("check", "config.yaml") // "Checking config.yaml..." -func ProgressSubject(verb, subject string) string { - lang := currentLangForGrammar() - - // Try translated word first - word := getWord(lang, verb) - if word == "" { - word = verb - } - - g := Gerund(word) - if g == "" { - return "" - } - - suffix := getPunct(lang, "progress", "...") - return Title(g) + " " + subject + suffix -} - -// ActionResult returns a result message for a completed action. -// Generates "Subject verbed" form. -// -// ActionResult("delete", "file") // "File deleted" -// ActionResult("commit", "changes") // "Changes committed" -func ActionResult(verb, subject string) string { - p := PastTense(verb) - if p == "" || subject == "" { - return "" - } - return Title(subject) + " " + p -} - -// ActionFailed returns a failure message for an action. -// Generates "Failed to verb subject" form. 
-// -// ActionFailed("delete", "file") // "Failed to delete file" -// ActionFailed("push", "commits") // "Failed to push commits" -func ActionFailed(verb, subject string) string { - if verb == "" { - return "" - } - if subject == "" { - return "Failed to " + verb - } - return "Failed to " + verb + " " + subject -} - -// Label returns a label with a colon suffix. -// Generates "Word:" form using language-specific punctuation. -// French uses " :" (space before colon), English uses ":". -// -// Label("status") // EN: "Status:" FR: "Statut :" -// Label("version") // EN: "Version:" FR: "Version :" -func Label(word string) string { - if word == "" { - return "" - } - - lang := currentLangForGrammar() - - // Try translated word first - translated := getWord(lang, word) - if translated == "" { - translated = word - } - - suffix := getPunct(lang, "label", ":") - return Title(translated) + suffix -} diff --git a/pkg/i18n/grammar_test.go b/pkg/i18n/grammar_test.go deleted file mode 100644 index 00780f2..0000000 --- a/pkg/i18n/grammar_test.go +++ /dev/null @@ -1,303 +0,0 @@ -package i18n - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestPastTense(t *testing.T) { - tests := []struct { - verb string - expected string - }{ - // Irregular verbs - {"be", "was"}, - {"have", "had"}, - {"do", "did"}, - {"go", "went"}, - {"make", "made"}, - {"get", "got"}, - {"run", "ran"}, - {"write", "wrote"}, - {"build", "built"}, - {"find", "found"}, - {"keep", "kept"}, - {"think", "thought"}, - - // Regular verbs - ends in -e - {"delete", "deleted"}, - {"save", "saved"}, - {"create", "created"}, - {"update", "updated"}, - {"remove", "removed"}, - - // Regular verbs - consonant + y -> ied - {"copy", "copied"}, - {"carry", "carried"}, - {"try", "tried"}, - - // Regular verbs - vowel + y -> yed - {"play", "played"}, - {"stay", "stayed"}, - {"enjoy", "enjoyed"}, - - // Regular verbs - CVC doubling - {"stop", "stopped"}, - {"drop", "dropped"}, - {"plan", "planned"}, - - 
// Regular verbs - no doubling - {"install", "installed"}, - {"open", "opened"}, - {"start", "started"}, - - // Edge cases - {"", ""}, - {" delete ", "deleted"}, - } - - for _, tt := range tests { - t.Run(tt.verb, func(t *testing.T) { - result := PastTense(tt.verb) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestGerund(t *testing.T) { - tests := []struct { - verb string - expected string - }{ - // Irregular verbs - {"be", "being"}, - {"have", "having"}, - {"run", "running"}, - {"write", "writing"}, - - // Regular verbs - drop -e - {"delete", "deleting"}, - {"save", "saving"}, - {"create", "creating"}, - {"update", "updating"}, - - // Regular verbs - ie -> ying - {"die", "dying"}, - {"lie", "lying"}, - {"tie", "tying"}, - - // Regular verbs - CVC doubling - {"stop", "stopping"}, - {"run", "running"}, - {"plan", "planning"}, - - // Regular verbs - no doubling - {"install", "installing"}, - {"open", "opening"}, - {"start", "starting"}, - {"play", "playing"}, - - // Edge cases - {"", ""}, - } - - for _, tt := range tests { - t.Run(tt.verb, func(t *testing.T) { - result := Gerund(tt.verb) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestPluralize(t *testing.T) { - tests := []struct { - noun string - count int - expected string - }{ - // Singular (count = 1) - {"file", 1, "file"}, - {"repo", 1, "repo"}, - - // Regular plurals - {"file", 2, "files"}, - {"repo", 5, "repos"}, - {"user", 0, "users"}, - - // -s, -ss, -sh, -ch, -x, -z -> -es - {"bus", 2, "buses"}, - {"class", 3, "classes"}, - {"bush", 2, "bushes"}, - {"match", 2, "matches"}, - {"box", 2, "boxes"}, - - // consonant + y -> -ies - {"city", 2, "cities"}, - {"repository", 3, "repositories"}, - {"copy", 2, "copies"}, - - // vowel + y -> -ys - {"key", 2, "keys"}, - {"day", 2, "days"}, - {"toy", 2, "toys"}, - - // Irregular nouns - {"child", 2, "children"}, - {"person", 3, "people"}, - {"man", 2, "men"}, - {"woman", 2, "women"}, - {"foot", 2, "feet"}, - {"tooth", 2, "teeth"}, - {"mouse", 
2, "mice"}, - {"index", 2, "indices"}, - - // Unchanged plurals - {"fish", 2, "fish"}, - {"sheep", 2, "sheep"}, - {"deer", 2, "deer"}, - {"species", 2, "species"}, - } - - for _, tt := range tests { - t.Run(tt.noun, func(t *testing.T) { - result := Pluralize(tt.noun, tt.count) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestPluralForm(t *testing.T) { - tests := []struct { - noun string - expected string - }{ - // Regular - {"file", "files"}, - {"repo", "repos"}, - - // -es endings - {"box", "boxes"}, - {"class", "classes"}, - {"bush", "bushes"}, - {"match", "matches"}, - - // -ies endings - {"city", "cities"}, - {"copy", "copies"}, - - // Irregular - {"child", "children"}, - {"person", "people"}, - - // Title case preservation - {"Child", "Children"}, - {"Person", "People"}, - - // Empty - {"", ""}, - } - - for _, tt := range tests { - t.Run(tt.noun, func(t *testing.T) { - result := PluralForm(tt.noun) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestArticle(t *testing.T) { - tests := []struct { - word string - expected string - }{ - // Regular vowels -> "an" - {"error", "an"}, - {"apple", "an"}, - {"issue", "an"}, - {"update", "an"}, - {"item", "an"}, - {"object", "an"}, - - // Regular consonants -> "a" - {"file", "a"}, - {"repo", "a"}, - {"commit", "a"}, - {"branch", "a"}, - {"test", "a"}, - - // Consonant sounds despite vowel start -> "a" - {"user", "a"}, - {"union", "a"}, - {"unique", "a"}, - {"unit", "a"}, - {"universe", "a"}, - {"one", "a"}, - {"once", "a"}, - {"euro", "a"}, - - // Vowel sounds despite consonant start -> "an" - {"hour", "an"}, - {"honest", "an"}, - {"honour", "an"}, - {"heir", "an"}, - - // Edge cases - {"", ""}, - {" error ", "an"}, - } - - for _, tt := range tests { - t.Run(tt.word, func(t *testing.T) { - result := Article(tt.word) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestTitle(t *testing.T) { - tests := []struct { - input string - expected string - }{ - {"hello world", "Hello World"}, - 
{"file deleted", "File Deleted"}, - {"ALREADY CAPS", "ALREADY CAPS"}, - {"", ""}, - } - - for _, tt := range tests { - t.Run(tt.input, func(t *testing.T) { - result := Title(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestQuote(t *testing.T) { - tests := []struct { - input string - expected string - }{ - {"file.txt", `"file.txt"`}, - {"", `""`}, - {"hello world", `"hello world"`}, - } - - for _, tt := range tests { - t.Run(tt.input, func(t *testing.T) { - result := Quote(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestTemplateFuncs(t *testing.T) { - funcs := TemplateFuncs() - - // Check all expected functions are present - expectedFuncs := []string{"title", "lower", "upper", "past", "gerund", "plural", "pluralForm", "article", "quote"} - for _, name := range expectedFuncs { - assert.Contains(t, funcs, name, "TemplateFuncs should contain %s", name) - } -} diff --git a/pkg/i18n/handler.go b/pkg/i18n/handler.go deleted file mode 100644 index d40df14..0000000 --- a/pkg/i18n/handler.go +++ /dev/null @@ -1,166 +0,0 @@ -// Package i18n provides internationalization for the CLI. -package i18n - -import ( - "fmt" - "strings" -) - -// --- Built-in Handlers --- - -// LabelHandler handles i18n.label.{word} → "Status:" patterns. -type LabelHandler struct{} - -func (h LabelHandler) Match(key string) bool { - return strings.HasPrefix(key, "i18n.label.") -} - -func (h LabelHandler) Handle(key string, args []any, next func() string) string { - word := strings.TrimPrefix(key, "i18n.label.") - return Label(word) -} - -// ProgressHandler handles i18n.progress.{verb} → "Building..." patterns. 
-type ProgressHandler struct{} - -func (h ProgressHandler) Match(key string) bool { - return strings.HasPrefix(key, "i18n.progress.") -} - -func (h ProgressHandler) Handle(key string, args []any, next func() string) string { - verb := strings.TrimPrefix(key, "i18n.progress.") - if len(args) > 0 { - if subj, ok := args[0].(string); ok { - return ProgressSubject(verb, subj) - } - } - return Progress(verb) -} - -// CountHandler handles i18n.count.{noun} → "5 files" patterns. -type CountHandler struct{} - -func (h CountHandler) Match(key string) bool { - return strings.HasPrefix(key, "i18n.count.") -} - -func (h CountHandler) Handle(key string, args []any, next func() string) string { - noun := strings.TrimPrefix(key, "i18n.count.") - if len(args) > 0 { - count := toInt(args[0]) - return fmt.Sprintf("%d %s", count, Pluralize(noun, count)) - } - return noun -} - -// DoneHandler handles i18n.done.{verb} → "File deleted" patterns. -type DoneHandler struct{} - -func (h DoneHandler) Match(key string) bool { - return strings.HasPrefix(key, "i18n.done.") -} - -func (h DoneHandler) Handle(key string, args []any, next func() string) string { - verb := strings.TrimPrefix(key, "i18n.done.") - if len(args) > 0 { - if subj, ok := args[0].(string); ok { - return ActionResult(verb, subj) - } - } - return Title(PastTense(verb)) -} - -// FailHandler handles i18n.fail.{verb} → "Failed to delete file" patterns. -type FailHandler struct{} - -func (h FailHandler) Match(key string) bool { - return strings.HasPrefix(key, "i18n.fail.") -} - -func (h FailHandler) Handle(key string, args []any, next func() string) string { - verb := strings.TrimPrefix(key, "i18n.fail.") - if len(args) > 0 { - if subj, ok := args[0].(string); ok { - return ActionFailed(verb, subj) - } - } - return ActionFailed(verb, "") -} - -// NumericHandler handles i18n.numeric.{format} → formatted numbers. 
-type NumericHandler struct{} - -func (h NumericHandler) Match(key string) bool { - return strings.HasPrefix(key, "i18n.numeric.") -} - -func (h NumericHandler) Handle(key string, args []any, next func() string) string { - if len(args) == 0 { - return next() - } - - format := strings.TrimPrefix(key, "i18n.numeric.") - switch format { - case "number", "int": - return FormatNumber(toInt64(args[0])) - case "decimal", "float": - return FormatDecimal(toFloat64(args[0])) - case "percent", "pct": - return FormatPercent(toFloat64(args[0])) - case "bytes", "size": - return FormatBytes(toInt64(args[0])) - case "ordinal", "ord": - return FormatOrdinal(toInt(args[0])) - case "ago": - if len(args) >= 2 { - if unit, ok := args[1].(string); ok { - return FormatAgo(toInt(args[0]), unit) - } - } - } - return next() -} - -// --- Handler Chain --- - -// DefaultHandlers returns the built-in i18n.* namespace handlers. -func DefaultHandlers() []KeyHandler { - return []KeyHandler{ - LabelHandler{}, - ProgressHandler{}, - CountHandler{}, - DoneHandler{}, - FailHandler{}, - NumericHandler{}, - } -} - -// RunHandlerChain executes a chain of handlers for a key. -// Returns empty string if no handler matched (caller should use standard lookup). 
-func RunHandlerChain(handlers []KeyHandler, key string, args []any, fallback func() string) string { - for i, h := range handlers { - if h.Match(key) { - // Create next function that tries remaining handlers - next := func() string { - remaining := handlers[i+1:] - if len(remaining) > 0 { - return RunHandlerChain(remaining, key, args, fallback) - } - return fallback() - } - return h.Handle(key, args, next) - } - } - return fallback() -} - -// --- Compile-time interface checks --- - -var ( - _ KeyHandler = LabelHandler{} - _ KeyHandler = ProgressHandler{} - _ KeyHandler = CountHandler{} - _ KeyHandler = DoneHandler{} - _ KeyHandler = FailHandler{} - _ KeyHandler = NumericHandler{} -) diff --git a/pkg/i18n/handler_test.go b/pkg/i18n/handler_test.go deleted file mode 100644 index bdc56a0..0000000 --- a/pkg/i18n/handler_test.go +++ /dev/null @@ -1,173 +0,0 @@ -package i18n - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestLabelHandler(t *testing.T) { - h := LabelHandler{} - - t.Run("matches i18n.label prefix", func(t *testing.T) { - assert.True(t, h.Match("i18n.label.status")) - assert.True(t, h.Match("i18n.label.version")) - assert.False(t, h.Match("i18n.progress.build")) - assert.False(t, h.Match("cli.label.status")) - }) - - t.Run("handles label", func(t *testing.T) { - result := h.Handle("i18n.label.status", nil, func() string { return "fallback" }) - assert.Equal(t, "Status:", result) - }) -} - -func TestProgressHandler(t *testing.T) { - h := ProgressHandler{} - - t.Run("matches i18n.progress prefix", func(t *testing.T) { - assert.True(t, h.Match("i18n.progress.build")) - assert.True(t, h.Match("i18n.progress.check")) - assert.False(t, h.Match("i18n.label.status")) - }) - - t.Run("handles progress without subject", func(t *testing.T) { - result := h.Handle("i18n.progress.build", nil, func() string { return "fallback" }) - assert.Equal(t, "Building...", result) - }) - - t.Run("handles progress with subject", func(t *testing.T) { - 
result := h.Handle("i18n.progress.check", []any{"config"}, func() string { return "fallback" }) - assert.Equal(t, "Checking config...", result) - }) -} - -func TestCountHandler(t *testing.T) { - h := CountHandler{} - - t.Run("matches i18n.count prefix", func(t *testing.T) { - assert.True(t, h.Match("i18n.count.file")) - assert.True(t, h.Match("i18n.count.repo")) - assert.False(t, h.Match("i18n.label.count")) - }) - - t.Run("handles count with number", func(t *testing.T) { - result := h.Handle("i18n.count.file", []any{5}, func() string { return "fallback" }) - assert.Equal(t, "5 files", result) - }) - - t.Run("handles singular count", func(t *testing.T) { - result := h.Handle("i18n.count.file", []any{1}, func() string { return "fallback" }) - assert.Equal(t, "1 file", result) - }) - - t.Run("handles no args", func(t *testing.T) { - result := h.Handle("i18n.count.file", nil, func() string { return "fallback" }) - assert.Equal(t, "file", result) - }) -} - -func TestDoneHandler(t *testing.T) { - h := DoneHandler{} - - t.Run("matches i18n.done prefix", func(t *testing.T) { - assert.True(t, h.Match("i18n.done.delete")) - assert.True(t, h.Match("i18n.done.save")) - assert.False(t, h.Match("i18n.fail.delete")) - }) - - t.Run("handles done with subject", func(t *testing.T) { - result := h.Handle("i18n.done.delete", []any{"config.yaml"}, func() string { return "fallback" }) - // ActionResult title-cases the subject - assert.Equal(t, "Config.Yaml deleted", result) - }) - - t.Run("handles done without subject", func(t *testing.T) { - result := h.Handle("i18n.done.delete", nil, func() string { return "fallback" }) - assert.Equal(t, "Deleted", result) - }) -} - -func TestFailHandler(t *testing.T) { - h := FailHandler{} - - t.Run("matches i18n.fail prefix", func(t *testing.T) { - assert.True(t, h.Match("i18n.fail.delete")) - assert.True(t, h.Match("i18n.fail.save")) - assert.False(t, h.Match("i18n.done.delete")) - }) - - t.Run("handles fail with subject", func(t *testing.T) { - 
result := h.Handle("i18n.fail.delete", []any{"config.yaml"}, func() string { return "fallback" }) - assert.Equal(t, "Failed to delete config.yaml", result) - }) - - t.Run("handles fail without subject", func(t *testing.T) { - result := h.Handle("i18n.fail.delete", nil, func() string { return "fallback" }) - assert.Contains(t, result, "Failed to delete") - }) -} - -func TestNumericHandler(t *testing.T) { - h := NumericHandler{} - - t.Run("matches i18n.numeric prefix", func(t *testing.T) { - assert.True(t, h.Match("i18n.numeric.number")) - assert.True(t, h.Match("i18n.numeric.bytes")) - assert.False(t, h.Match("i18n.count.file")) - }) - - t.Run("handles number format", func(t *testing.T) { - result := h.Handle("i18n.numeric.number", []any{1234567}, func() string { return "fallback" }) - assert.Equal(t, "1,234,567", result) - }) - - t.Run("handles bytes format", func(t *testing.T) { - result := h.Handle("i18n.numeric.bytes", []any{1024}, func() string { return "fallback" }) - assert.Equal(t, "1 KB", result) - }) - - t.Run("handles ordinal format", func(t *testing.T) { - result := h.Handle("i18n.numeric.ordinal", []any{3}, func() string { return "fallback" }) - assert.Equal(t, "3rd", result) - }) - - t.Run("falls through on no args", func(t *testing.T) { - result := h.Handle("i18n.numeric.number", nil, func() string { return "fallback" }) - assert.Equal(t, "fallback", result) - }) - - t.Run("falls through on unknown format", func(t *testing.T) { - result := h.Handle("i18n.numeric.unknown", []any{123}, func() string { return "fallback" }) - assert.Equal(t, "fallback", result) - }) -} - -func TestDefaultHandlers(t *testing.T) { - handlers := DefaultHandlers() - assert.Len(t, handlers, 6) -} - -func TestRunHandlerChain(t *testing.T) { - handlers := DefaultHandlers() - - t.Run("label handler matches", func(t *testing.T) { - result := RunHandlerChain(handlers, "i18n.label.status", nil, func() string { return "fallback" }) - assert.Equal(t, "Status:", result) - }) - - 
t.Run("progress handler matches", func(t *testing.T) { - result := RunHandlerChain(handlers, "i18n.progress.build", nil, func() string { return "fallback" }) - assert.Equal(t, "Building...", result) - }) - - t.Run("falls back for unknown key", func(t *testing.T) { - result := RunHandlerChain(handlers, "cli.unknown", nil, func() string { return "fallback" }) - assert.Equal(t, "fallback", result) - }) - - t.Run("empty handler chain uses fallback", func(t *testing.T) { - result := RunHandlerChain(nil, "any.key", nil, func() string { return "fallback" }) - assert.Equal(t, "fallback", result) - }) -} diff --git a/pkg/i18n/hooks.go b/pkg/i18n/hooks.go deleted file mode 100644 index 5a8049e..0000000 --- a/pkg/i18n/hooks.go +++ /dev/null @@ -1,96 +0,0 @@ -// Package i18n provides internationalization for the CLI. -package i18n - -import ( - "io/fs" - "runtime" - "sync" - "sync/atomic" -) - -var missingKeyHandler atomic.Value // stores MissingKeyHandler - -// localeRegistration holds a filesystem and directory for locale loading. -type localeRegistration struct { - fsys fs.FS - dir string -} - -var ( - registeredLocales []localeRegistration - registeredLocalesMu sync.Mutex - localesLoaded bool -) - -// RegisterLocales registers a filesystem containing locale files to be loaded. -// Call this in your package's init() to register translations. -// Locales are loaded when the i18n service initialises. 
-// -// //go:embed locales/*.json -// var localeFS embed.FS -// -// func init() { -// i18n.RegisterLocales(localeFS, "locales") -// } -func RegisterLocales(fsys fs.FS, dir string) { - registeredLocalesMu.Lock() - defer registeredLocalesMu.Unlock() - registeredLocales = append(registeredLocales, localeRegistration{fsys: fsys, dir: dir}) - - // If locales already loaded (service already running), load immediately - if localesLoaded { - if svc := Default(); svc != nil { - _ = svc.LoadFS(fsys, dir) - } - } -} - -// loadRegisteredLocales loads all registered locale filesystems into the service. -// Called by the service during initialisation. -func loadRegisteredLocales(svc *Service) { - registeredLocalesMu.Lock() - defer registeredLocalesMu.Unlock() - - for _, reg := range registeredLocales { - _ = svc.LoadFS(reg.fsys, reg.dir) - } - localesLoaded = true -} - -// OnMissingKey registers a handler for missing translation keys. -// Called when T() can't find a key in ModeCollect. -// Thread-safe: can be called concurrently with translations. -// -// i18n.SetMode(i18n.ModeCollect) -// i18n.OnMissingKey(func(m i18n.MissingKey) { -// log.Printf("MISSING: %s at %s:%d", m.Key, m.CallerFile, m.CallerLine) -// }) -func OnMissingKey(h MissingKeyHandler) { - missingKeyHandler.Store(h) -} - -// dispatchMissingKey creates and dispatches a MissingKey event. -// Called internally when a key is missing in ModeCollect. 
-func dispatchMissingKey(key string, args map[string]any) { - v := missingKeyHandler.Load() - if v == nil { - return - } - h, ok := v.(MissingKeyHandler) - if !ok || h == nil { - return - } - - _, file, line, ok := runtime.Caller(2) // Skip dispatchMissingKey and handleMissingKey - if !ok { - file = "unknown" - line = 0 - } - - h(MissingKey{ - Key: key, - Args: args, - CallerFile: file, - CallerLine: line, - }) -} diff --git a/pkg/i18n/i18n.go b/pkg/i18n/i18n.go deleted file mode 100644 index 60959d1..0000000 --- a/pkg/i18n/i18n.go +++ /dev/null @@ -1,192 +0,0 @@ -// Package i18n provides internationalization for the CLI. -// -// Locale files use nested JSON for compatibility with translation tools: -// -// { -// "cli": { -// "success": "Operation completed", -// "count": { -// "items": { -// "one": "{{.Count}} item", -// "other": "{{.Count}} items" -// } -// } -// } -// } -// -// Keys are accessed with dot notation: T("cli.success"), T("cli.count.items") -// -// # Getting Started -// -// svc, err := i18n.New() -// fmt.Println(svc.T("cli.success")) -// fmt.Println(svc.T("cli.count.items", map[string]any{"Count": 5})) -package i18n - -import ( - "bytes" - "errors" - "strings" - "text/template" -) - -// --- Global convenience functions --- - -// T translates a message using the default service. -// For semantic intents (core.* namespace), pass a Subject as the first argument. -// -// T("cli.success") // Simple translation -// T("core.delete", S("file", "config.yaml")) // Semantic intent -func T(messageID string, args ...any) string { - if svc := Default(); svc != nil { - return svc.T(messageID, args...) - } - return messageID -} - -// Raw is the raw translation helper without i18n.* namespace magic. -// Unlike T(), this does NOT handle i18n.* namespace patterns. -// Use this for direct key lookups without auto-composition. 
-// -// Raw("cli.success") // Direct lookup -// T("i18n.label.status") // Smart: returns "Status:" -func Raw(messageID string, args ...any) string { - if svc := Default(); svc != nil { - return svc.Raw(messageID, args...) - } - return messageID -} - -// ErrServiceNotInitialized is returned when the i18n service is not initialized. -var ErrServiceNotInitialized = errors.New("i18n: service not initialized") - -// SetLanguage sets the language for the default service. -// Returns ErrServiceNotInitialized if the service has not been initialized, -// or an error if the language tag is invalid or unsupported. -// -// Unlike other Set* functions, this returns an error because it validates -// the language tag against available locales. -func SetLanguage(lang string) error { - svc := Default() - if svc == nil { - return ErrServiceNotInitialized - } - return svc.SetLanguage(lang) -} - -// CurrentLanguage returns the current language code from the default service. -// Returns "en-GB" (the fallback language) if the service is not initialized. -func CurrentLanguage() string { - if svc := Default(); svc != nil { - return svc.Language() - } - return "en-GB" -} - -// SetMode sets the translation mode for the default service. -// Does nothing if the service is not initialized. -func SetMode(m Mode) { - if svc := Default(); svc != nil { - svc.SetMode(m) - } -} - -// CurrentMode returns the current translation mode from the default service. -func CurrentMode() Mode { - if svc := Default(); svc != nil { - return svc.Mode() - } - return ModeNormal -} - -// N formats a number using the i18n.numeric.* namespace. -// Wrapper for T("i18n.numeric.{format}", value). 
-// -// N("number", 1234567) // T("i18n.numeric.number", 1234567) -// N("percent", 0.85) // T("i18n.numeric.percent", 0.85) -// N("bytes", 1536000) // T("i18n.numeric.bytes", 1536000) -// N("ordinal", 1) // T("i18n.numeric.ordinal", 1) -func N(format string, value any) string { - return T("i18n.numeric."+format, value) -} - -// AddHandler appends a handler to the default service's handler chain. -// Does nothing if the service is not initialized. -func AddHandler(h KeyHandler) { - if svc := Default(); svc != nil { - svc.AddHandler(h) - } -} - -// PrependHandler inserts a handler at the start of the default service's handler chain. -// Does nothing if the service is not initialized. -func PrependHandler(h KeyHandler) { - if svc := Default(); svc != nil { - svc.PrependHandler(h) - } -} - -// --- Template helpers --- - -// executeIntentTemplate executes an intent template with the given data. -// Templates are cached for performance - repeated calls with the same template -// string will reuse the compiled template. 
-func executeIntentTemplate(tmplStr string, data templateData) string { - if tmplStr == "" { - return "" - } - - // Check cache first - if cached, ok := templateCache.Load(tmplStr); ok { - var buf bytes.Buffer - if err := cached.(*template.Template).Execute(&buf, data); err != nil { - return tmplStr - } - return buf.String() - } - - // Parse and cache - tmpl, err := template.New("").Funcs(TemplateFuncs()).Parse(tmplStr) - if err != nil { - return tmplStr - } - - // Store in cache (safe even if another goroutine stored it first) - templateCache.Store(tmplStr, tmpl) - - var buf bytes.Buffer - if err := tmpl.Execute(&buf, data); err != nil { - return tmplStr - } - return buf.String() -} - -func applyTemplate(text string, data any) string { - // Quick check for template syntax - if !strings.Contains(text, "{{") { - return text - } - - // Check cache first - if cached, ok := templateCache.Load(text); ok { - var buf bytes.Buffer - if err := cached.(*template.Template).Execute(&buf, data); err != nil { - return text - } - return buf.String() - } - - // Parse and cache - tmpl, err := template.New("").Parse(text) - if err != nil { - return text - } - - templateCache.Store(text, tmpl) - - var buf bytes.Buffer - if err := tmpl.Execute(&buf, data); err != nil { - return text - } - return buf.String() -} diff --git a/pkg/i18n/i18n_test.go b/pkg/i18n/i18n_test.go deleted file mode 100644 index a02bbac..0000000 --- a/pkg/i18n/i18n_test.go +++ /dev/null @@ -1,577 +0,0 @@ -package i18n - -import ( - "sync" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestNew(t *testing.T) { - svc, err := New() - require.NoError(t, err) - require.NotNil(t, svc) - - // Should have English available - langs := svc.AvailableLanguages() - assert.Contains(t, langs, "en-GB") -} - -func TestTranslate(t *testing.T) { - svc, err := New() - require.NoError(t, err) - - // Basic translation - result := svc.T("cmd.dev.short") - assert.Equal(t, 
"Multi-repo development workflow", result) - - // Missing key returns the key - result = svc.T("nonexistent.key") - assert.Equal(t, "nonexistent.key", result) -} - -func TestTranslateWithArgs(t *testing.T) { - svc, err := New() - require.NoError(t, err) - - // Translation with template data - result := svc.T("error.repo_not_found", map[string]string{"Name": "config.yaml"}) - assert.Equal(t, "Repository 'config.yaml' not found", result) - - result = svc.T("cmd.ai.task_pr.branch_error", map[string]string{"Branch": "main"}) - assert.Equal(t, "cannot create PR from main branch; create a feature branch first", result) -} - -func TestSetLanguage(t *testing.T) { - svc, err := New() - require.NoError(t, err) - - // Default is en-GB - assert.Equal(t, "en-GB", svc.Language()) - - // Setting invalid language should error - err = svc.SetLanguage("xx-invalid") - assert.Error(t, err) - - // Language should still be en-GB - assert.Equal(t, "en-GB", svc.Language()) -} - -func TestDefaultService(t *testing.T) { - // Reset default for test - defaultService.Store(nil) - defaultOnce = sync.Once{} - defaultErr = nil - - err := Init() - require.NoError(t, err) - - svc := Default() - require.NotNil(t, svc) - - // Global T function should work - result := T("cmd.dev.short") - assert.Equal(t, "Multi-repo development workflow", result) -} - -func TestAddMessages(t *testing.T) { - svc, err := New() - require.NoError(t, err) - - // Add custom messages - svc.AddMessages("en-GB", map[string]string{ - "custom.greeting": "Hello, {{.Name}}!", - }) - - result := svc.T("custom.greeting", map[string]string{"Name": "World"}) - assert.Equal(t, "Hello, World!", result) -} - -func TestAvailableLanguages(t *testing.T) { - svc, err := New() - require.NoError(t, err) - - langs := svc.AvailableLanguages() - assert.NotEmpty(t, langs) - assert.Contains(t, langs, "en-GB") -} - -func TestDetectLanguage(t *testing.T) { - tests := []struct { - name string - langEnv string - expected string - }{ - { - name: 
"English exact", - langEnv: "en-GB", - expected: "en-GB", - }, - { - name: "English with encoding", - langEnv: "en_GB.UTF-8", - expected: "en-GB", - }, - { - name: "Empty LANG", - langEnv: "", - expected: "", - }, - } - - svc, err := New() - require.NoError(t, err) - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Setenv("LANG", tt.langEnv) - t.Setenv("LC_ALL", "") - t.Setenv("LC_MESSAGES", "") - - result := detectLanguage(svc.availableLangs) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestPluralization(t *testing.T) { - svc, err := New() - require.NoError(t, err) - SetDefault(svc) - - // Singular - uses i18n.count.* magic - result := svc.T("i18n.count.item", 1) - assert.Equal(t, "1 item", result) - - // Plural - result = svc.T("i18n.count.item", 5) - assert.Equal(t, "5 items", result) - - // Zero uses plural - result = svc.T("i18n.count.item", 0) - assert.Equal(t, "0 items", result) -} - -func TestNestedKeys(t *testing.T) { - svc, err := New() - require.NoError(t, err) - - // Nested key - result := svc.T("cmd.dev.short") - assert.Equal(t, "Multi-repo development workflow", result) - - // Deeper nested key (flat key with dots) - result = svc.T("cmd.dev.push.short") - assert.Equal(t, "Push commits across all repos", result) -} - -func TestMessage_ForCategory(t *testing.T) { - t.Run("basic categories", func(t *testing.T) { - msg := Message{ - Zero: "no items", - One: "1 item", - Two: "2 items", - Few: "a few items", - Many: "many items", - Other: "some items", - } - - assert.Equal(t, "no items", msg.ForCategory(PluralZero)) - assert.Equal(t, "1 item", msg.ForCategory(PluralOne)) - assert.Equal(t, "2 items", msg.ForCategory(PluralTwo)) - assert.Equal(t, "a few items", msg.ForCategory(PluralFew)) - assert.Equal(t, "many items", msg.ForCategory(PluralMany)) - assert.Equal(t, "some items", msg.ForCategory(PluralOther)) - }) - - t.Run("fallback to other", func(t *testing.T) { - msg := Message{ - One: "1 item", - Other: "items", - } - 
- // Categories without explicit values fall back to Other - assert.Equal(t, "items", msg.ForCategory(PluralZero)) - assert.Equal(t, "1 item", msg.ForCategory(PluralOne)) - assert.Equal(t, "items", msg.ForCategory(PluralFew)) - }) - - t.Run("fallback to one then text", func(t *testing.T) { - msg := Message{ - One: "single item", - } - - // Falls back to One when Other is empty - assert.Equal(t, "single item", msg.ForCategory(PluralOther)) - assert.Equal(t, "single item", msg.ForCategory(PluralMany)) - }) -} - -func TestServiceFormality(t *testing.T) { - svc, err := New() - require.NoError(t, err) - - t.Run("default is neutral", func(t *testing.T) { - assert.Equal(t, FormalityNeutral, svc.Formality()) - }) - - t.Run("set formality", func(t *testing.T) { - svc.SetFormality(FormalityFormal) - assert.Equal(t, FormalityFormal, svc.Formality()) - - svc.SetFormality(FormalityInformal) - assert.Equal(t, FormalityInformal, svc.Formality()) - }) -} - -func TestServiceDirection(t *testing.T) { - svc, err := New() - require.NoError(t, err) - - t.Run("English is LTR", func(t *testing.T) { - err := svc.SetLanguage("en-GB") - require.NoError(t, err) - - assert.Equal(t, DirLTR, svc.Direction()) - assert.False(t, svc.IsRTL()) - }) -} - -func TestServicePluralCategory(t *testing.T) { - svc, err := New() - require.NoError(t, err) - - t.Run("English plural rules", func(t *testing.T) { - assert.Equal(t, PluralOne, svc.PluralCategory(1)) - assert.Equal(t, PluralOther, svc.PluralCategory(0)) - assert.Equal(t, PluralOther, svc.PluralCategory(5)) - }) -} - -func TestDebugMode(t *testing.T) { - t.Run("default is disabled", func(t *testing.T) { - svc, err := New() - require.NoError(t, err) - assert.False(t, svc.Debug()) - }) - - t.Run("T with debug mode", func(t *testing.T) { - svc, err := New() - require.NoError(t, err) - - // Without debug - result := svc.T("cmd.dev.short") - assert.Equal(t, "Multi-repo development workflow", result) - - // Enable debug - svc.SetDebug(true) - 
assert.True(t, svc.Debug()) - - // With debug - shows key prefix - result = svc.T("cmd.dev.short") - assert.Equal(t, "[cmd.dev.short] Multi-repo development workflow", result) - - // Disable debug - svc.SetDebug(false) - result = svc.T("cmd.dev.short") - assert.Equal(t, "Multi-repo development workflow", result) - }) - - t.Run("package-level SetDebug", func(t *testing.T) { - // Reset default - defaultService.Store(nil) - defaultOnce = sync.Once{} - defaultErr = nil - - err := Init() - require.NoError(t, err) - - // Enable debug via package function - SetDebug(true) - assert.True(t, Default().Debug()) - - // Translate - result := T("cmd.dev.short") - assert.Equal(t, "[cmd.dev.short] Multi-repo development workflow", result) - - // Cleanup - SetDebug(false) - }) -} - -func TestI18nNamespaceMagic(t *testing.T) { - svc, err := New() - require.NoError(t, err) - SetDefault(svc) - - tests := []struct { - name string - key string - args []any - expected string - }{ - {"label", "i18n.label.status", nil, "Status:"}, - {"label version", "i18n.label.version", nil, "Version:"}, - {"progress", "i18n.progress.build", nil, "Building..."}, - {"progress check", "i18n.progress.check", nil, "Checking..."}, - {"progress with subject", "i18n.progress.check", []any{"config"}, "Checking config..."}, - {"count singular", "i18n.count.file", []any{1}, "1 file"}, - {"count plural", "i18n.count.file", []any{5}, "5 files"}, - {"done", "i18n.done.delete", []any{"file"}, "File deleted"}, - {"done build", "i18n.done.build", []any{"project"}, "Project built"}, - {"fail", "i18n.fail.delete", []any{"file"}, "Failed to delete file"}, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := svc.T(tt.key, tt.args...) 
- assert.Equal(t, tt.expected, result) - }) - } -} - -func TestRawBypassesI18nNamespace(t *testing.T) { - svc, err := New() - require.NoError(t, err) - - // Raw() should return key as-is since i18n.label.status isn't in JSON - result := svc.Raw("i18n.label.status") - assert.Equal(t, "i18n.label.status", result) - - // T() should compose it - result = svc.T("i18n.label.status") - assert.Equal(t, "Status:", result) -} - -func TestFormalityMessageSelection(t *testing.T) { - svc, err := New() - require.NoError(t, err) - - // Add test messages with formality variants - svc.AddMessages("en-GB", map[string]string{ - "greeting": "Hello", - "greeting._formal": "Good morning, sir", - "greeting._informal": "Hey there", - "farewell": "Goodbye", - "farewell._formal": "Farewell", - }) - - t.Run("neutral formality uses base key", func(t *testing.T) { - svc.SetFormality(FormalityNeutral) - assert.Equal(t, "Hello", svc.T("greeting")) - assert.Equal(t, "Goodbye", svc.T("farewell")) - }) - - t.Run("formal uses ._formal variant", func(t *testing.T) { - svc.SetFormality(FormalityFormal) - assert.Equal(t, "Good morning, sir", svc.T("greeting")) - assert.Equal(t, "Farewell", svc.T("farewell")) - }) - - t.Run("informal uses ._informal variant", func(t *testing.T) { - svc.SetFormality(FormalityInformal) - assert.Equal(t, "Hey there", svc.T("greeting")) - // No informal variant for farewell, falls back to base - assert.Equal(t, "Goodbye", svc.T("farewell")) - }) - - t.Run("subject formality overrides service formality", func(t *testing.T) { - svc.SetFormality(FormalityNeutral) - - // Subject with formal overrides neutral service - result := svc.T("greeting", S("user", "test").Formal()) - assert.Equal(t, "Good morning, sir", result) - - // Subject with informal overrides neutral service - result = svc.T("greeting", S("user", "test").Informal()) - assert.Equal(t, "Hey there", result) - }) - - t.Run("subject formality overrides service formal", func(t *testing.T) { - 
svc.SetFormality(FormalityFormal) - - // Subject with informal overrides formal service - result := svc.T("greeting", S("user", "test").Informal()) - assert.Equal(t, "Hey there", result) - }) - - t.Run("context formality overrides service formality", func(t *testing.T) { - svc.SetFormality(FormalityNeutral) - - // TranslationContext with formal overrides neutral service - result := svc.T("greeting", C("user greeting").Formal()) - assert.Equal(t, "Good morning, sir", result) - - // TranslationContext with informal overrides neutral service - result = svc.T("greeting", C("user greeting").Informal()) - assert.Equal(t, "Hey there", result) - }) - - t.Run("context formality overrides service formal", func(t *testing.T) { - svc.SetFormality(FormalityFormal) - - // TranslationContext with informal overrides formal service - result := svc.T("greeting", C("user greeting").Informal()) - assert.Equal(t, "Hey there", result) - }) -} - -func TestNewWithOptions(t *testing.T) { - t.Run("WithFallback", func(t *testing.T) { - svc, err := New(WithFallback("de-DE")) - require.NoError(t, err) - assert.Equal(t, "de-DE", svc.fallbackLang) - }) - - t.Run("WithFormality", func(t *testing.T) { - svc, err := New(WithFormality(FormalityFormal)) - require.NoError(t, err) - assert.Equal(t, FormalityFormal, svc.Formality()) - }) - - t.Run("WithMode", func(t *testing.T) { - svc, err := New(WithMode(ModeStrict)) - require.NoError(t, err) - assert.Equal(t, ModeStrict, svc.Mode()) - }) - - t.Run("WithDebug", func(t *testing.T) { - svc, err := New(WithDebug(true)) - require.NoError(t, err) - assert.True(t, svc.Debug()) - }) - - t.Run("WithHandlers replaces defaults", func(t *testing.T) { - customHandler := LabelHandler{} - svc, err := New(WithHandlers(customHandler)) - require.NoError(t, err) - assert.Len(t, svc.Handlers(), 1) - }) - - t.Run("WithDefaultHandlers adds back defaults", func(t *testing.T) { - svc, err := New(WithHandlers(), WithDefaultHandlers()) - require.NoError(t, err) - 
assert.Len(t, svc.Handlers(), 6) // 6 default handlers - }) - - t.Run("multiple options", func(t *testing.T) { - svc, err := New( - WithFallback("fr-FR"), - WithFormality(FormalityInformal), - WithMode(ModeCollect), - WithDebug(true), - ) - require.NoError(t, err) - assert.Equal(t, "fr-FR", svc.fallbackLang) - assert.Equal(t, FormalityInformal, svc.Formality()) - assert.Equal(t, ModeCollect, svc.Mode()) - assert.True(t, svc.Debug()) - }) -} - -func TestNewWithLoader(t *testing.T) { - t.Run("uses custom loader", func(t *testing.T) { - loader := NewFSLoader(localeFS, "locales") - svc, err := NewWithLoader(loader) - require.NoError(t, err) - assert.NotNil(t, svc.loader) - assert.Contains(t, svc.AvailableLanguages(), "en-GB") - }) - - t.Run("with options", func(t *testing.T) { - loader := NewFSLoader(localeFS, "locales") - svc, err := NewWithLoader(loader, WithFallback("de-DE"), WithFormality(FormalityFormal)) - require.NoError(t, err) - assert.Equal(t, "de-DE", svc.fallbackLang) - assert.Equal(t, FormalityFormal, svc.Formality()) - }) -} - -func TestNewWithFS(t *testing.T) { - t.Run("with options", func(t *testing.T) { - svc, err := NewWithFS(localeFS, "locales", WithDebug(true)) - require.NoError(t, err) - assert.True(t, svc.Debug()) - }) -} - -func TestConcurrentTranslation(t *testing.T) { - svc, err := New() - require.NoError(t, err) - - t.Run("concurrent T calls", func(t *testing.T) { - var wg sync.WaitGroup - for i := 0; i < 100; i++ { - wg.Add(1) - go func() { - defer wg.Done() - result := svc.T("cmd.dev.short") - assert.Equal(t, "Multi-repo development workflow", result) - }() - } - wg.Wait() - }) - - t.Run("concurrent T with args", func(t *testing.T) { - var wg sync.WaitGroup - for i := 0; i < 100; i++ { - wg.Add(1) - go func(n int) { - defer wg.Done() - result := svc.T("i18n.count.file", n) - if n == 1 { - assert.Equal(t, "1 file", result) - } else { - assert.Contains(t, result, "files") - } - }(i) - } - wg.Wait() - }) - - t.Run("concurrent read and write", 
func(t *testing.T) { - var wg sync.WaitGroup - - // Readers - for i := 0; i < 50; i++ { - wg.Add(1) - go func() { - defer wg.Done() - _ = svc.T("cmd.dev.short") - _ = svc.Language() - _ = svc.Formality() - }() - } - - // Writers - for i := 0; i < 10; i++ { - wg.Add(1) - go func() { - defer wg.Done() - svc.SetFormality(FormalityNeutral) - svc.SetDebug(false) - }() - } - - wg.Wait() - }) -} - -func TestConcurrentDefault(t *testing.T) { - // Reset for test - defaultService.Store(nil) - defaultOnce = sync.Once{} - defaultErr = nil - - var wg sync.WaitGroup - for i := 0; i < 50; i++ { - wg.Add(1) - go func() { - defer wg.Done() - svc := Default() - assert.NotNil(t, svc) - }() - } - wg.Wait() -} diff --git a/pkg/i18n/interface_test.go b/pkg/i18n/interface_test.go deleted file mode 100644 index fde57a5..0000000 --- a/pkg/i18n/interface_test.go +++ /dev/null @@ -1,74 +0,0 @@ -package i18n - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestServiceImplementsTranslator(t *testing.T) { - // This test verifies at compile time that Service implements Translator - var _ Translator = (*Service)(nil) - - // Create a service and use it through the interface - var translator Translator - svc, err := New() - require.NoError(t, err) - - translator = svc - - // Test interface methods - assert.Equal(t, "Multi-repo development workflow", translator.T("cmd.dev.short")) - assert.NotEmpty(t, translator.Language()) - assert.NotNil(t, translator.Direction()) - assert.NotNil(t, translator.Formality()) -} - -// MockTranslator demonstrates how to create a mock for testing -type MockTranslator struct { - translations map[string]string - language string -} - -func (m *MockTranslator) T(key string, args ...any) string { - if v, ok := m.translations[key]; ok { - return v - } - return key -} - -func (m *MockTranslator) SetLanguage(lang string) error { - m.language = lang - return nil -} - -func (m *MockTranslator) Language() string { 
- return m.language -} - -func (m *MockTranslator) SetMode(mode Mode) {} -func (m *MockTranslator) Mode() Mode { return ModeNormal } -func (m *MockTranslator) SetDebug(enabled bool) {} -func (m *MockTranslator) Debug() bool { return false } -func (m *MockTranslator) SetFormality(f Formality) {} -func (m *MockTranslator) Formality() Formality { return FormalityNeutral } -func (m *MockTranslator) Direction() TextDirection { return DirLTR } -func (m *MockTranslator) IsRTL() bool { return false } -func (m *MockTranslator) PluralCategory(n int) PluralCategory { - return PluralOther -} -func (m *MockTranslator) AvailableLanguages() []string { return []string{"en-GB"} } - -func TestMockTranslator(t *testing.T) { - var translator Translator = &MockTranslator{ - translations: map[string]string{ - "test.hello": "Hello from mock", - }, - language: "en-GB", - } - - assert.Equal(t, "Hello from mock", translator.T("test.hello")) - assert.Equal(t, "test.missing", translator.T("test.missing")) - assert.Equal(t, "en-GB", translator.Language()) -} diff --git a/pkg/i18n/language.go b/pkg/i18n/language.go deleted file mode 100644 index 638ca78..0000000 --- a/pkg/i18n/language.go +++ /dev/null @@ -1,192 +0,0 @@ -// Package i18n provides internationalization for the CLI. -package i18n - -// String returns the string representation of the Formality. -func (f Formality) String() string { - switch f { - case FormalityInformal: - return "informal" - case FormalityFormal: - return "formal" - default: - return "neutral" - } -} - -// String returns the string representation of the TextDirection. -func (d TextDirection) String() string { - if d == DirRTL { - return "rtl" - } - return "ltr" -} - -// String returns the string representation of the PluralCategory. 
-func (p PluralCategory) String() string { - switch p { - case PluralZero: - return "zero" - case PluralOne: - return "one" - case PluralTwo: - return "two" - case PluralFew: - return "few" - case PluralMany: - return "many" - default: - return "other" - } -} - -// String returns the string representation of the GrammaticalGender. -func (g GrammaticalGender) String() string { - switch g { - case GenderMasculine: - return "masculine" - case GenderFeminine: - return "feminine" - case GenderCommon: - return "common" - default: - return "neuter" - } -} - -// IsRTLLanguage returns true if the language code uses right-to-left text. -func IsRTLLanguage(lang string) bool { - // Check exact match first - if rtlLanguages[lang] { - return true - } - // Check base language (e.g., "ar" for "ar-SA") - if len(lang) > 2 { - base := lang[:2] - return rtlLanguages[base] - } - return false -} - -// pluralRuleEnglish returns the plural category for English. -// Categories: one (n=1), other. -func pluralRuleEnglish(n int) PluralCategory { - if n == 1 { - return PluralOne - } - return PluralOther -} - -// pluralRuleGerman returns the plural category for German. -// Categories: same as English. -func pluralRuleGerman(n int) PluralCategory { - return pluralRuleEnglish(n) -} - -// pluralRuleFrench returns the plural category for French. -// Categories: one (n=0,1), other. -func pluralRuleFrench(n int) PluralCategory { - if n == 0 || n == 1 { - return PluralOne - } - return PluralOther -} - -// pluralRuleSpanish returns the plural category for Spanish. -// Categories: one (n=1), other. -func pluralRuleSpanish(n int) PluralCategory { - if n == 1 { - return PluralOne - } - return PluralOther -} - -// pluralRuleRussian returns the plural category for Russian. -// Categories: one (n%10=1, n%100!=11), few (n%10=2-4, n%100!=12-14), many (others). 
-func pluralRuleRussian(n int) PluralCategory { - mod10 := n % 10 - mod100 := n % 100 - - if mod10 == 1 && mod100 != 11 { - return PluralOne - } - if mod10 >= 2 && mod10 <= 4 && (mod100 < 12 || mod100 > 14) { - return PluralFew - } - return PluralMany -} - -// pluralRulePolish returns the plural category for Polish. -// Categories: one (n=1), few (n%10=2-4, n%100!=12-14), many (others). -func pluralRulePolish(n int) PluralCategory { - if n == 1 { - return PluralOne - } - mod10 := n % 10 - mod100 := n % 100 - if mod10 >= 2 && mod10 <= 4 && (mod100 < 12 || mod100 > 14) { - return PluralFew - } - return PluralMany -} - -// pluralRuleArabic returns the plural category for Arabic. -// Categories: zero (n=0), one (n=1), two (n=2), few (n%100=3-10), many (n%100=11-99), other. -func pluralRuleArabic(n int) PluralCategory { - if n == 0 { - return PluralZero - } - if n == 1 { - return PluralOne - } - if n == 2 { - return PluralTwo - } - mod100 := n % 100 - if mod100 >= 3 && mod100 <= 10 { - return PluralFew - } - if mod100 >= 11 && mod100 <= 99 { - return PluralMany - } - return PluralOther -} - -// pluralRuleChinese returns the plural category for Chinese. -// Categories: other (no plural distinction). -func pluralRuleChinese(n int) PluralCategory { - return PluralOther -} - -// pluralRuleJapanese returns the plural category for Japanese. -// Categories: other (no plural distinction). -func pluralRuleJapanese(n int) PluralCategory { - return PluralOther -} - -// pluralRuleKorean returns the plural category for Korean. -// Categories: other (no plural distinction). -func pluralRuleKorean(n int) PluralCategory { - return PluralOther -} - -// GetPluralRule returns the plural rule for a language code. -// Falls back to English rules if the language is not found. 
-func GetPluralRule(lang string) PluralRule { - if rule, ok := pluralRules[lang]; ok { - return rule - } - // Try base language - if len(lang) > 2 { - base := lang[:2] - if rule, ok := pluralRules[base]; ok { - return rule - } - } - // Default to English - return pluralRuleEnglish -} - -// GetPluralCategory returns the plural category for a count in the given language. -func GetPluralCategory(lang string, n int) PluralCategory { - return GetPluralRule(lang)(n) -} diff --git a/pkg/i18n/language_test.go b/pkg/i18n/language_test.go deleted file mode 100644 index 617b5e6..0000000 --- a/pkg/i18n/language_test.go +++ /dev/null @@ -1,172 +0,0 @@ -package i18n - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestFormality_String(t *testing.T) { - tests := []struct { - f Formality - expected string - }{ - {FormalityNeutral, "neutral"}, - {FormalityInformal, "informal"}, - {FormalityFormal, "formal"}, - {Formality(99), "neutral"}, // Unknown defaults to neutral - } - - for _, tt := range tests { - assert.Equal(t, tt.expected, tt.f.String()) - } -} - -func TestTextDirection_String(t *testing.T) { - assert.Equal(t, "ltr", DirLTR.String()) - assert.Equal(t, "rtl", DirRTL.String()) -} - -func TestPluralCategory_String(t *testing.T) { - tests := []struct { - cat PluralCategory - expected string - }{ - {PluralZero, "zero"}, - {PluralOne, "one"}, - {PluralTwo, "two"}, - {PluralFew, "few"}, - {PluralMany, "many"}, - {PluralOther, "other"}, - } - - for _, tt := range tests { - assert.Equal(t, tt.expected, tt.cat.String()) - } -} - -func TestGrammaticalGender_String(t *testing.T) { - tests := []struct { - g GrammaticalGender - expected string - }{ - {GenderNeuter, "neuter"}, - {GenderMasculine, "masculine"}, - {GenderFeminine, "feminine"}, - {GenderCommon, "common"}, - } - - for _, tt := range tests { - assert.Equal(t, tt.expected, tt.g.String()) - } -} - -func TestIsRTLLanguage(t *testing.T) { - // RTL languages - assert.True(t, IsRTLLanguage("ar")) - 
assert.True(t, IsRTLLanguage("ar-SA")) - assert.True(t, IsRTLLanguage("he")) - assert.True(t, IsRTLLanguage("he-IL")) - assert.True(t, IsRTLLanguage("fa")) - assert.True(t, IsRTLLanguage("ur")) - - // LTR languages - assert.False(t, IsRTLLanguage("en")) - assert.False(t, IsRTLLanguage("en-GB")) - assert.False(t, IsRTLLanguage("de")) - assert.False(t, IsRTLLanguage("fr")) - assert.False(t, IsRTLLanguage("zh")) -} - -func TestPluralRuleEnglish(t *testing.T) { - tests := []struct { - n int - expected PluralCategory - }{ - {0, PluralOther}, - {1, PluralOne}, - {2, PluralOther}, - {5, PluralOther}, - {100, PluralOther}, - } - - for _, tt := range tests { - assert.Equal(t, tt.expected, pluralRuleEnglish(tt.n), "count=%d", tt.n) - } -} - -func TestPluralRuleFrench(t *testing.T) { - // French uses singular for 0 and 1 - assert.Equal(t, PluralOne, pluralRuleFrench(0)) - assert.Equal(t, PluralOne, pluralRuleFrench(1)) - assert.Equal(t, PluralOther, pluralRuleFrench(2)) -} - -func TestPluralRuleRussian(t *testing.T) { - tests := []struct { - n int - expected PluralCategory - }{ - {1, PluralOne}, - {2, PluralFew}, - {3, PluralFew}, - {4, PluralFew}, - {5, PluralMany}, - {11, PluralMany}, - {12, PluralMany}, - {21, PluralOne}, - {22, PluralFew}, - {25, PluralMany}, - } - - for _, tt := range tests { - assert.Equal(t, tt.expected, pluralRuleRussian(tt.n), "count=%d", tt.n) - } -} - -func TestPluralRuleArabic(t *testing.T) { - tests := []struct { - n int - expected PluralCategory - }{ - {0, PluralZero}, - {1, PluralOne}, - {2, PluralTwo}, - {3, PluralFew}, - {10, PluralFew}, - {11, PluralMany}, - {99, PluralMany}, - {100, PluralOther}, - } - - for _, tt := range tests { - assert.Equal(t, tt.expected, pluralRuleArabic(tt.n), "count=%d", tt.n) - } -} - -func TestPluralRuleChinese(t *testing.T) { - // Chinese has no plural distinction - assert.Equal(t, PluralOther, pluralRuleChinese(0)) - assert.Equal(t, PluralOther, pluralRuleChinese(1)) - assert.Equal(t, PluralOther, 
pluralRuleChinese(100)) -} - -func TestGetPluralRule(t *testing.T) { - // Known languages - rule := GetPluralRule("en-GB") - assert.Equal(t, PluralOne, rule(1)) - - rule = GetPluralRule("ru") - assert.Equal(t, PluralFew, rule(2)) - - // Unknown language falls back to English - rule = GetPluralRule("xx-unknown") - assert.Equal(t, PluralOne, rule(1)) - assert.Equal(t, PluralOther, rule(2)) -} - -func TestGetPluralCategory(t *testing.T) { - assert.Equal(t, PluralOne, GetPluralCategory("en", 1)) - assert.Equal(t, PluralOther, GetPluralCategory("en", 5)) - assert.Equal(t, PluralFew, GetPluralCategory("ru", 3)) -} diff --git a/pkg/i18n/loader.go b/pkg/i18n/loader.go deleted file mode 100644 index b6df355..0000000 --- a/pkg/i18n/loader.go +++ /dev/null @@ -1,274 +0,0 @@ -// Package i18n provides internationalization for the CLI. -package i18n - -import ( - "encoding/json" - "fmt" - "io/fs" - "path" - "strings" - "sync" -) - -// FSLoader loads translations from a filesystem (embedded or disk). -type FSLoader struct { - fsys fs.FS - dir string - - // Cache of available languages (populated on first Languages() call) - languages []string - langOnce sync.Once - langErr error // Error from directory scan, if any -} - -// NewFSLoader creates a loader for the given filesystem and directory. -func NewFSLoader(fsys fs.FS, dir string) *FSLoader { - return &FSLoader{ - fsys: fsys, - dir: dir, - } -} - -// Load implements Loader.Load - loads messages and grammar for a language. 
-func (l *FSLoader) Load(lang string) (map[string]Message, *GrammarData, error) { - // Try both hyphen and underscore variants - variants := []string{ - lang + ".json", - strings.ReplaceAll(lang, "-", "_") + ".json", - strings.ReplaceAll(lang, "_", "-") + ".json", - } - - var data []byte - var err error - for _, filename := range variants { - filePath := path.Join(l.dir, filename) // Use path.Join for fs.FS (forward slashes) - data, err = fs.ReadFile(l.fsys, filePath) - if err == nil { - break - } - } - if err != nil { - return nil, nil, fmt.Errorf("locale %q not found: %w", lang, err) - } - - var raw map[string]any - if err := json.Unmarshal(data, &raw); err != nil { - return nil, nil, fmt.Errorf("invalid JSON in locale %q: %w", lang, err) - } - - messages := make(map[string]Message) - grammar := &GrammarData{ - Verbs: make(map[string]VerbForms), - Nouns: make(map[string]NounForms), - Words: make(map[string]string), - } - - flattenWithGrammar("", raw, messages, grammar) - - return messages, grammar, nil -} - -// Languages implements Loader.Languages - returns available language codes. -// Thread-safe: uses sync.Once to ensure the directory is scanned only once. -// Returns nil if the directory scan failed (check LanguagesErr for details). -func (l *FSLoader) Languages() []string { - l.langOnce.Do(func() { - entries, err := fs.ReadDir(l.fsys, l.dir) - if err != nil { - l.langErr = fmt.Errorf("failed to read locale directory %q: %w", l.dir, err) - return - } - - for _, entry := range entries { - if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".json") { - continue - } - lang := strings.TrimSuffix(entry.Name(), ".json") - // Normalise underscore to hyphen (en_GB -> en-GB) - lang = strings.ReplaceAll(lang, "_", "-") - l.languages = append(l.languages, lang) - } - }) - - return l.languages -} - -// LanguagesErr returns any error that occurred during Languages() scan. -// Returns nil if the scan succeeded. 
-func (l *FSLoader) LanguagesErr() error { - l.Languages() // Ensure scan has been attempted - return l.langErr -} - -// Ensure FSLoader implements Loader at compile time. -var _ Loader = (*FSLoader)(nil) - -// --- Flatten helpers --- - -// flatten recursively flattens nested maps into dot-notation keys. -func flatten(prefix string, data map[string]any, out map[string]Message) { - flattenWithGrammar(prefix, data, out, nil) -} - -// flattenWithGrammar recursively flattens nested maps and extracts grammar data. -func flattenWithGrammar(prefix string, data map[string]any, out map[string]Message, grammar *GrammarData) { - for key, value := range data { - fullKey := key - if prefix != "" { - fullKey = prefix + "." + key - } - - switch v := value.(type) { - case string: - // Check if this is a word in gram.word.* - if grammar != nil && strings.HasPrefix(fullKey, "gram.word.") { - wordKey := strings.TrimPrefix(fullKey, "gram.word.") - grammar.Words[strings.ToLower(wordKey)] = v - continue - } - out[fullKey] = Message{Text: v} - - case map[string]any: - // Check if this is a verb form object - // Grammar data lives under "gram.*" (a nod to Gram - grandmother) - if grammar != nil && isVerbFormObject(v) { - verbName := key - if strings.HasPrefix(fullKey, "gram.verb.") { - verbName = strings.TrimPrefix(fullKey, "gram.verb.") - } - forms := VerbForms{} - if past, ok := v["past"].(string); ok { - forms.Past = past - } - if gerund, ok := v["gerund"].(string); ok { - forms.Gerund = gerund - } - grammar.Verbs[strings.ToLower(verbName)] = forms - continue - } - - // Check if this is a noun form object - if grammar != nil && isNounFormObject(v) { - nounName := key - if strings.HasPrefix(fullKey, "gram.noun.") { - nounName = strings.TrimPrefix(fullKey, "gram.noun.") - } - forms := NounForms{} - if one, ok := v["one"].(string); ok { - forms.One = one - } - if other, ok := v["other"].(string); ok { - forms.Other = other - } - if gender, ok := v["gender"].(string); ok { - forms.Gender = 
gender - } - grammar.Nouns[strings.ToLower(nounName)] = forms - continue - } - - // Check if this is an article object - if grammar != nil && fullKey == "gram.article" { - if indef, ok := v["indefinite"].(map[string]any); ok { - if def, ok := indef["default"].(string); ok { - grammar.Articles.IndefiniteDefault = def - } - if vowel, ok := indef["vowel"].(string); ok { - grammar.Articles.IndefiniteVowel = vowel - } - } - if def, ok := v["definite"].(string); ok { - grammar.Articles.Definite = def - } - continue - } - - // Check if this is a punctuation rules object - if grammar != nil && fullKey == "gram.punct" { - if label, ok := v["label"].(string); ok { - grammar.Punct.LabelSuffix = label - } - if progress, ok := v["progress"].(string); ok { - grammar.Punct.ProgressSuffix = progress - } - continue - } - - // Check if this is a plural object (has CLDR plural category keys) - if isPluralObject(v) { - msg := Message{} - if zero, ok := v["zero"].(string); ok { - msg.Zero = zero - } - if one, ok := v["one"].(string); ok { - msg.One = one - } - if two, ok := v["two"].(string); ok { - msg.Two = two - } - if few, ok := v["few"].(string); ok { - msg.Few = few - } - if many, ok := v["many"].(string); ok { - msg.Many = many - } - if other, ok := v["other"].(string); ok { - msg.Other = other - } - out[fullKey] = msg - } else { - // Recurse into nested object - flattenWithGrammar(fullKey, v, out, grammar) - } - } - } -} - -// --- Check helpers --- - -// isVerbFormObject checks if a map represents verb conjugation forms. -func isVerbFormObject(m map[string]any) bool { - _, hasBase := m["base"] - _, hasPast := m["past"] - _, hasGerund := m["gerund"] - return (hasBase || hasPast || hasGerund) && !isPluralObject(m) -} - -// isNounFormObject checks if a map represents noun forms (with gender). -// Noun form objects have "gender" field, distinguishing them from CLDR plural objects. 
-func isNounFormObject(m map[string]any) bool { - _, hasGender := m["gender"] - return hasGender -} - -// hasPluralCategories checks if a map has CLDR plural categories beyond one/other. -func hasPluralCategories(m map[string]any) bool { - _, hasZero := m["zero"] - _, hasTwo := m["two"] - _, hasFew := m["few"] - _, hasMany := m["many"] - return hasZero || hasTwo || hasFew || hasMany -} - -// isPluralObject checks if a map represents plural forms. -// Recognizes all CLDR plural categories: zero, one, two, few, many, other. -func isPluralObject(m map[string]any) bool { - _, hasZero := m["zero"] - _, hasOne := m["one"] - _, hasTwo := m["two"] - _, hasFew := m["few"] - _, hasMany := m["many"] - _, hasOther := m["other"] - - // It's a plural object if it has any plural category key - if !hasZero && !hasOne && !hasTwo && !hasFew && !hasMany && !hasOther { - return false - } - // But not if it contains nested objects (those are namespace containers) - for _, v := range m { - if _, isMap := v.(map[string]any); isMap { - return false - } - } - return true -} diff --git a/pkg/i18n/loader_test.go b/pkg/i18n/loader_test.go deleted file mode 100644 index 0af3573..0000000 --- a/pkg/i18n/loader_test.go +++ /dev/null @@ -1,589 +0,0 @@ -package i18n - -import ( - "testing" - "testing/fstest" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestFSLoader_Load(t *testing.T) { - t.Run("loads simple messages", func(t *testing.T) { - fsys := fstest.MapFS{ - "locales/en.json": &fstest.MapFile{ - Data: []byte(`{"hello": "world", "nested": {"key": "value"}}`), - }, - } - loader := NewFSLoader(fsys, "locales") - messages, grammar, err := loader.Load("en") - require.NoError(t, err) - assert.NotNil(t, grammar) - assert.Equal(t, "world", messages["hello"].Text) - assert.Equal(t, "value", messages["nested.key"].Text) - }) - - t.Run("handles underscore/hyphen variants", func(t *testing.T) { - fsys := fstest.MapFS{ - "locales/en_GB.json": &fstest.MapFile{ 
- Data: []byte(`{"greeting": "Hello"}`), - }, - } - loader := NewFSLoader(fsys, "locales") - messages, _, err := loader.Load("en-GB") - require.NoError(t, err) - assert.Equal(t, "Hello", messages["greeting"].Text) - }) - - t.Run("returns error for missing language", func(t *testing.T) { - fsys := fstest.MapFS{} - loader := NewFSLoader(fsys, "locales") - _, _, err := loader.Load("fr") - assert.Error(t, err) - assert.Contains(t, err.Error(), "not found") - }) - - t.Run("extracts grammar data", func(t *testing.T) { - fsys := fstest.MapFS{ - "locales/en.json": &fstest.MapFile{ - Data: []byte(`{ - "gram": { - "verb": { - "run": {"past": "ran", "gerund": "running"} - }, - "noun": { - "file": {"one": "file", "other": "files", "gender": "neuter"} - } - } - }`), - }, - } - loader := NewFSLoader(fsys, "locales") - _, grammar, err := loader.Load("en") - require.NoError(t, err) - assert.Equal(t, "ran", grammar.Verbs["run"].Past) - assert.Equal(t, "running", grammar.Verbs["run"].Gerund) - assert.Equal(t, "files", grammar.Nouns["file"].Other) - }) -} - -func TestFSLoader_Languages(t *testing.T) { - t.Run("lists available languages", func(t *testing.T) { - fsys := fstest.MapFS{ - "locales/en.json": &fstest.MapFile{Data: []byte(`{}`)}, - "locales/de.json": &fstest.MapFile{Data: []byte(`{}`)}, - "locales/fr_FR.json": &fstest.MapFile{Data: []byte(`{}`)}, - } - loader := NewFSLoader(fsys, "locales") - langs := loader.Languages() - assert.Contains(t, langs, "en") - assert.Contains(t, langs, "de") - assert.Contains(t, langs, "fr-FR") // normalised - }) - - t.Run("caches result", func(t *testing.T) { - fsys := fstest.MapFS{ - "locales/en.json": &fstest.MapFile{Data: []byte(`{}`)}, - } - loader := NewFSLoader(fsys, "locales") - langs1 := loader.Languages() - langs2 := loader.Languages() - assert.Equal(t, langs1, langs2) - }) - - t.Run("empty directory", func(t *testing.T) { - fsys := fstest.MapFS{} - loader := NewFSLoader(fsys, "locales") - langs := loader.Languages() - assert.Empty(t, 
langs) - }) -} - -func TestFlatten(t *testing.T) { - tests := []struct { - name string - prefix string - data map[string]any - expected map[string]Message - }{ - { - name: "simple string", - prefix: "", - data: map[string]any{"hello": "world"}, - expected: map[string]Message{ - "hello": {Text: "world"}, - }, - }, - { - name: "nested object", - prefix: "", - data: map[string]any{ - "cli": map[string]any{ - "success": "Done", - "error": "Failed", - }, - }, - expected: map[string]Message{ - "cli.success": {Text: "Done"}, - "cli.error": {Text: "Failed"}, - }, - }, - { - name: "with prefix", - prefix: "app", - data: map[string]any{"key": "value"}, - expected: map[string]Message{ - "app.key": {Text: "value"}, - }, - }, - { - name: "deeply nested", - prefix: "", - data: map[string]any{ - "a": map[string]any{ - "b": map[string]any{ - "c": "deep value", - }, - }, - }, - expected: map[string]Message{ - "a.b.c": {Text: "deep value"}, - }, - }, - { - name: "plural object", - prefix: "", - data: map[string]any{ - "items": map[string]any{ - "one": "{{.Count}} item", - "other": "{{.Count}} items", - }, - }, - expected: map[string]Message{ - "items": {One: "{{.Count}} item", Other: "{{.Count}} items"}, - }, - }, - { - name: "full CLDR plural", - prefix: "", - data: map[string]any{ - "files": map[string]any{ - "zero": "no files", - "one": "one file", - "two": "two files", - "few": "a few files", - "many": "many files", - "other": "{{.Count}} files", - }, - }, - expected: map[string]Message{ - "files": { - Zero: "no files", - One: "one file", - Two: "two files", - Few: "a few files", - Many: "many files", - Other: "{{.Count}} files", - }, - }, - }, - { - name: "mixed content", - prefix: "", - data: map[string]any{ - "simple": "text", - "plural": map[string]any{ - "one": "singular", - "other": "plural", - }, - "nested": map[string]any{ - "child": "nested value", - }, - }, - expected: map[string]Message{ - "simple": {Text: "text"}, - "plural": {One: "singular", Other: "plural"}, - 
"nested.child": {Text: "nested value"}, - }, - }, - { - name: "empty data", - prefix: "", - data: map[string]any{}, - expected: map[string]Message{}, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - out := make(map[string]Message) - flatten(tt.prefix, tt.data, out) - assert.Equal(t, tt.expected, out) - }) - } -} - -func TestFlattenWithGrammar(t *testing.T) { - t.Run("extracts verb forms", func(t *testing.T) { - data := map[string]any{ - "gram": map[string]any{ - "verb": map[string]any{ - "run": map[string]any{ - "base": "run", - "past": "ran", - "gerund": "running", - }, - }, - }, - } - out := make(map[string]Message) - grammar := &GrammarData{ - Verbs: make(map[string]VerbForms), - Nouns: make(map[string]NounForms), - } - flattenWithGrammar("", data, out, grammar) - - assert.Contains(t, grammar.Verbs, "run") - assert.Equal(t, "ran", grammar.Verbs["run"].Past) - assert.Equal(t, "running", grammar.Verbs["run"].Gerund) - }) - - t.Run("extracts noun forms", func(t *testing.T) { - data := map[string]any{ - "gram": map[string]any{ - "noun": map[string]any{ - "file": map[string]any{ - "one": "file", - "other": "files", - "gender": "neuter", - }, - }, - }, - } - out := make(map[string]Message) - grammar := &GrammarData{ - Verbs: make(map[string]VerbForms), - Nouns: make(map[string]NounForms), - } - flattenWithGrammar("", data, out, grammar) - - assert.Contains(t, grammar.Nouns, "file") - assert.Equal(t, "file", grammar.Nouns["file"].One) - assert.Equal(t, "files", grammar.Nouns["file"].Other) - assert.Equal(t, "neuter", grammar.Nouns["file"].Gender) - }) - - t.Run("extracts articles", func(t *testing.T) { - data := map[string]any{ - "gram": map[string]any{ - "article": map[string]any{ - "indefinite": map[string]any{ - "default": "a", - "vowel": "an", - }, - "definite": "the", - }, - }, - } - out := make(map[string]Message) - grammar := &GrammarData{ - Verbs: make(map[string]VerbForms), - Nouns: make(map[string]NounForms), - } - 
flattenWithGrammar("", data, out, grammar) - - assert.Equal(t, "a", grammar.Articles.IndefiniteDefault) - assert.Equal(t, "an", grammar.Articles.IndefiniteVowel) - assert.Equal(t, "the", grammar.Articles.Definite) - }) - - t.Run("extracts punctuation rules", func(t *testing.T) { - data := map[string]any{ - "gram": map[string]any{ - "punct": map[string]any{ - "label": ":", - "progress": "...", - }, - }, - } - out := make(map[string]Message) - grammar := &GrammarData{ - Verbs: make(map[string]VerbForms), - Nouns: make(map[string]NounForms), - } - flattenWithGrammar("", data, out, grammar) - - assert.Equal(t, ":", grammar.Punct.LabelSuffix) - assert.Equal(t, "...", grammar.Punct.ProgressSuffix) - }) - - t.Run("nil grammar skips extraction", func(t *testing.T) { - data := map[string]any{ - "gram": map[string]any{ - "verb": map[string]any{ - "run": map[string]any{ - "past": "ran", - "gerund": "running", - }, - }, - }, - "simple": "text", - } - out := make(map[string]Message) - flattenWithGrammar("", data, out, nil) - - // Without grammar, verb forms are recursively processed as nested objects - assert.Contains(t, out, "simple") - assert.Equal(t, "text", out["simple"].Text) - }) -} - -func TestIsVerbFormObject(t *testing.T) { - tests := []struct { - name string - input map[string]any - expected bool - }{ - { - name: "has base only", - input: map[string]any{"base": "run"}, - expected: true, - }, - { - name: "has past only", - input: map[string]any{"past": "ran"}, - expected: true, - }, - { - name: "has gerund only", - input: map[string]any{"gerund": "running"}, - expected: true, - }, - { - name: "has all verb forms", - input: map[string]any{"base": "run", "past": "ran", "gerund": "running"}, - expected: true, - }, - { - name: "empty map", - input: map[string]any{}, - expected: false, - }, - { - name: "plural object not verb", - input: map[string]any{"one": "item", "other": "items"}, - expected: false, - }, - { - name: "unrelated keys", - input: map[string]any{"foo": 
"bar", "baz": "qux"}, - expected: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := isVerbFormObject(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestIsNounFormObject(t *testing.T) { - tests := []struct { - name string - input map[string]any - expected bool - }{ - { - name: "has gender", - input: map[string]any{"gender": "masculine", "one": "file", "other": "files"}, - expected: true, - }, - { - name: "gender only", - input: map[string]any{"gender": "feminine"}, - expected: true, - }, - { - name: "no gender", - input: map[string]any{"one": "item", "other": "items"}, - expected: false, - }, - { - name: "empty map", - input: map[string]any{}, - expected: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := isNounFormObject(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestHasPluralCategories(t *testing.T) { - tests := []struct { - name string - input map[string]any - expected bool - }{ - { - name: "has zero", - input: map[string]any{"zero": "none", "one": "one", "other": "many"}, - expected: true, - }, - { - name: "has two", - input: map[string]any{"one": "one", "two": "two", "other": "many"}, - expected: true, - }, - { - name: "has few", - input: map[string]any{"one": "one", "few": "few", "other": "many"}, - expected: true, - }, - { - name: "has many", - input: map[string]any{"one": "one", "many": "many", "other": "other"}, - expected: true, - }, - { - name: "has all categories", - input: map[string]any{"zero": "0", "one": "1", "two": "2", "few": "few", "many": "many", "other": "other"}, - expected: true, - }, - { - name: "only one and other", - input: map[string]any{"one": "item", "other": "items"}, - expected: false, - }, - { - name: "empty map", - input: map[string]any{}, - expected: false, - }, - { - name: "unrelated keys", - input: map[string]any{"foo": "bar"}, - expected: false, - }, - } - - for _, tt := range tests { - 
t.Run(tt.name, func(t *testing.T) { - result := hasPluralCategories(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestIsPluralObject(t *testing.T) { - tests := []struct { - name string - input map[string]any - expected bool - }{ - { - name: "one and other", - input: map[string]any{"one": "item", "other": "items"}, - expected: true, - }, - { - name: "all CLDR categories", - input: map[string]any{"zero": "0", "one": "1", "two": "2", "few": "few", "many": "many", "other": "other"}, - expected: true, - }, - { - name: "only other", - input: map[string]any{"other": "items"}, - expected: true, - }, - { - name: "empty map", - input: map[string]any{}, - expected: false, - }, - { - name: "nested map is not plural", - input: map[string]any{"one": "item", "other": map[string]any{"nested": "value"}}, - expected: false, - }, - { - name: "unrelated keys", - input: map[string]any{"foo": "bar", "baz": "qux"}, - expected: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := isPluralObject(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestMessageIsPlural(t *testing.T) { - tests := []struct { - name string - msg Message - expected bool - }{ - { - name: "has zero", - msg: Message{Zero: "none"}, - expected: true, - }, - { - name: "has one", - msg: Message{One: "item"}, - expected: true, - }, - { - name: "has two", - msg: Message{Two: "items"}, - expected: true, - }, - { - name: "has few", - msg: Message{Few: "a few"}, - expected: true, - }, - { - name: "has many", - msg: Message{Many: "lots"}, - expected: true, - }, - { - name: "has other", - msg: Message{Other: "items"}, - expected: true, - }, - { - name: "has all", - msg: Message{Zero: "0", One: "1", Two: "2", Few: "few", Many: "many", Other: "other"}, - expected: true, - }, - { - name: "text only", - msg: Message{Text: "hello"}, - expected: false, - }, - { - name: "empty message", - msg: Message{}, - expected: false, - }, - } - - for _, tt := 
range tests { - t.Run(tt.name, func(t *testing.T) { - result := tt.msg.IsPlural() - assert.Equal(t, tt.expected, result) - }) - } -} diff --git a/pkg/i18n/locales/de.json b/pkg/i18n/locales/de.json deleted file mode 100644 index 85e139a..0000000 --- a/pkg/i18n/locales/de.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "gram": { - "verb": { - "delete": { "base": "löschen", "past": "gelöscht", "gerund": "löschend" }, - "save": { "base": "speichern", "past": "gespeichert", "gerund": "speichernd" }, - "create": { "base": "erstellen", "past": "erstellt", "gerund": "erstellend" }, - "update": { "base": "aktualisieren", "past": "aktualisiert", "gerund": "aktualisierend" }, - "build": { "base": "bauen", "past": "gebaut", "gerund": "bauend" }, - "run": { "base": "laufen", "past": "gelaufen", "gerund": "laufend" }, - "check": { "base": "prüfen", "past": "geprüft", "gerund": "prüfend" }, - "install": { "base": "installieren", "past": "installiert", "gerund": "installierend" }, - "push": { "base": "pushen", "past": "gepusht", "gerund": "pushend" }, - "pull": { "base": "pullen", "past": "gepullt", "gerund": "pullend" }, - "commit": { "base": "committen", "past": "committet", "gerund": "committend" } - }, - "noun": { - "file": { "one": "Datei", "other": "Dateien", "gender": "feminine" }, - "repo": { "one": "Repository", "other": "Repositories", "gender": "neuter" }, - "commit": { "one": "Commit", "other": "Commits", "gender": "masculine" }, - "branch": { "one": "Branch", "other": "Branches", "gender": "masculine" }, - "change": { "one": "Änderung", "other": "Änderungen", "gender": "feminine" }, - "item": { "one": "Element", "other": "Elemente", "gender": "neuter" } - }, - "article": { - "indefinite": { "masculine": "ein", "feminine": "eine", "neuter": "ein" }, - "definite": { "masculine": "der", "feminine": "die", "neuter": "das" } - }, - "punct": { - "label": ":", - "progress": "..." 
- }, - "number": { - "thousands": ".", - "decimal": ",", - "percent": "%s %%" - } - }, - "prompt": { - "yes": "j", - "no": "n", - "continue": "Weiter?", - "proceed": "Fortfahren?", - "confirm": "Sind Sie sicher?" - }, - "time": { - "just_now": "gerade eben", - "ago": { - "second": { "one": "vor {{.Count}} Sekunde", "other": "vor {{.Count}} Sekunden" }, - "minute": { "one": "vor {{.Count}} Minute", "other": "vor {{.Count}} Minuten" }, - "hour": { "one": "vor {{.Count}} Stunde", "other": "vor {{.Count}} Stunden" }, - "day": { "one": "vor {{.Count}} Tag", "other": "vor {{.Count}} Tagen" }, - "week": { "one": "vor {{.Count}} Woche", "other": "vor {{.Count}} Wochen" } - } - }, - "cmd": { - "dev.short": "Multi-Repository-Entwicklung", - "doctor.short": "Entwicklungsumgebung prüfen" - }, - "error": { - "gh_not_found": "'gh' CLI nicht gefunden. Installieren von https://cli.github.com/" - }, - "lang": { - "de": "Deutsch", - "en": "Englisch", - "es": "Spanisch", - "fr": "Französisch", - "zh": "Chinesisch" - } -} diff --git a/pkg/i18n/locales/en_AU.json b/pkg/i18n/locales/en_AU.json deleted file mode 100644 index 2c63c08..0000000 --- a/pkg/i18n/locales/en_AU.json +++ /dev/null @@ -1,2 +0,0 @@ -{ -} diff --git a/pkg/i18n/locales/en_GB.json b/pkg/i18n/locales/en_GB.json deleted file mode 100644 index e03cd79..0000000 --- a/pkg/i18n/locales/en_GB.json +++ /dev/null @@ -1,485 +0,0 @@ -{ - "gram": { - "verb": { - "be": { "base": "be", "past": "was", "gerund": "being" }, - "go": { "base": "go", "past": "went", "gerund": "going" }, - "do": { "base": "do", "past": "did", "gerund": "doing" }, - "have": { "base": "have", "past": "had", "gerund": "having" }, - "make": { "base": "make", "past": "made", "gerund": "making" }, - "get": { "base": "get", "past": "got", "gerund": "getting" }, - "run": { "base": "run", "past": "ran", "gerund": "running" }, - "write": { "base": "write", "past": "wrote", "gerund": "writing" }, - "build": { "base": "build", "past": "built", "gerund": "building" }, 
- "send": { "base": "send", "past": "sent", "gerund": "sending" }, - "find": { "base": "find", "past": "found", "gerund": "finding" }, - "take": { "base": "take", "past": "took", "gerund": "taking" }, - "begin": { "base": "begin", "past": "began", "gerund": "beginning" }, - "keep": { "base": "keep", "past": "kept", "gerund": "keeping" }, - "hold": { "base": "hold", "past": "held", "gerund": "holding" }, - "bring": { "base": "bring", "past": "brought", "gerund": "bringing" }, - "think": { "base": "think", "past": "thought", "gerund": "thinking" }, - "buy": { "base": "buy", "past": "bought", "gerund": "buying" }, - "catch": { "base": "catch", "past": "caught", "gerund": "catching" }, - "choose": { "base": "choose", "past": "chose", "gerund": "choosing" }, - "lose": { "base": "lose", "past": "lost", "gerund": "losing" }, - "win": { "base": "win", "past": "won", "gerund": "winning" }, - "meet": { "base": "meet", "past": "met", "gerund": "meeting" }, - "lead": { "base": "lead", "past": "led", "gerund": "leading" }, - "leave": { "base": "leave", "past": "left", "gerund": "leaving" }, - "spend": { "base": "spend", "past": "spent", "gerund": "spending" }, - "pay": { "base": "pay", "past": "paid", "gerund": "paying" }, - "sell": { "base": "sell", "past": "sold", "gerund": "selling" }, - "commit": { "base": "commit", "past": "committed", "gerund": "committing" }, - "stop": { "base": "stop", "past": "stopped", "gerund": "stopping" }, - "scan": { "base": "scan", "past": "scanned", "gerund": "scanning" }, - "format": { "base": "format", "past": "formatted", "gerund": "formatting" }, - "set": { "base": "set", "past": "set", "gerund": "setting" }, - "put": { "base": "put", "past": "put", "gerund": "putting" }, - "cut": { "base": "cut", "past": "cut", "gerund": "cutting" }, - "hit": { "base": "hit", "past": "hit", "gerund": "hitting" }, - "sit": { "base": "sit", "past": "sat", "gerund": "sitting" }, - "split": { "base": "split", "past": "split", "gerund": "splitting" }, - "shut": 
{ "base": "shut", "past": "shut", "gerund": "shutting" } - }, - "noun": { - "file": { "one": "file", "other": "files" }, - "repo": { "one": "repo", "other": "repos" }, - "repository": { "one": "repository", "other": "repositories" }, - "commit": { "one": "commit", "other": "commits" }, - "branch": { "one": "branch", "other": "branches" }, - "change": { "one": "change", "other": "changes" }, - "item": { "one": "item", "other": "items" }, - "issue": { "one": "issue", "other": "issues" }, - "task": { "one": "task", "other": "tasks" }, - "person": { "one": "person", "other": "people" }, - "child": { "one": "child", "other": "children" }, - "package": { "one": "package", "other": "packages" }, - "artifact": { "one": "artifact", "other": "artifacts" }, - "vulnerability": { "one": "vulnerability", "other": "vulnerabilities" }, - "dependency": { "one": "dependency", "other": "dependencies" }, - "directory": { "one": "directory", "other": "directories" }, - "category": { "one": "category", "other": "categories" }, - "query": { "one": "query", "other": "queries" } - }, - "article": { - "indefinite": { "default": "a", "vowel": "an" }, - "definite": "the" - }, - "word": { - "url": "URL", - "id": "ID", - "ok": "OK", - "ci": "CI", - "qa": "QA", - "php": "PHP", - "sdk": "SDK", - "html": "HTML", - "cgo": "CGO", - "pid": "PID", - "cpus": "CPUs", - "ssh": "SSH", - "ssl": "SSL", - "api": "API", - "pr": "PR", - "vite": "Vite", - "pnpm": "pnpm", - "app_url": "app URL", - "blocked_by": "blocked by", - "claimed_by": "claimed by", - "related_files": "related files", - "up_to_date": "up to date", - "dry_run": "dry run", - "go_mod": "go.mod" - }, - "punct": { - "label": ":", - "progress": "..." - }, - "number": { - "thousands": ",", - "decimal": ".", - "percent": "%s%%" - } - }, - "prompt": { - "yes": "y", - "no": "n", - "continue": "Continue?", - "proceed": "Proceed?", - "confirm": "Are you sure?", - "overwrite": "Overwrite?", - "discard": "Discard changes?" 
- }, - "time": { - "just_now": "just now", - "ago": { - "second": { "one": "{{.Count}} second ago", "other": "{{.Count}} seconds ago" }, - "minute": { "one": "{{.Count}} minute ago", "other": "{{.Count}} minutes ago" }, - "hour": { "one": "{{.Count}} hour ago", "other": "{{.Count}} hours ago" }, - "day": { "one": "{{.Count}} day ago", "other": "{{.Count}} days ago" }, - "week": { "one": "{{.Count}} week ago", "other": "{{.Count}} weeks ago" } - } - }, - "cmd": { - "ai": { - "short": "AI agent task management", - "claude.short": "Claude Code integration", - "task.short": "Show task details or auto-select a task", - "task.id_required": "task ID required (or use --auto)", - "task.no_pending": "No pending tasks available.", - "tasks.short": "List available tasks from core-agentic", - "task_commit.short": "Auto-commit changes with task reference", - "task_commit.no_changes": "No uncommitted changes to commit.", - "task_complete.short": "Mark a task as completed", - "task_pr.short": "Create a pull request for a task", - "task_pr.branch_error": "cannot create PR from {{.Branch}} branch; create a feature branch first", - "task_update.short": "Update task status or progress" - }, - "build": { - "short": "Build projects with auto-detection and cross-compilation", - "error.invalid_target": "invalid target format \"{{.Target}}\", expected OS/arch (e.g., linux/amd64)", - "error.no_project_type": "no supported project type detected in {{.Dir}}\nSupported types: go (go.mod), wails (wails.json), node (package.json), php (composer.json)", - "from_path.short": "Build from a local directory", - "pwa.short": "Build from a live PWA URL", - "sdk.short": "Generate API SDKs from OpenAPI spec" - }, - "ci": { - "short": "Publish releases (dry-run by default)", - "dry_run_hint": "(dry-run) use --we-are-go-for-launch to publish", - "go_for_launch": "GO FOR LAUNCH", - "init.short": "Initialize release configuration", - "changelog.short": "Generate changelog", - "version.short": "Show or set 
version" - }, - "dev": { - "short": "Multi-repo development workflow", - "no_changes": "No uncommitted changes found.", - "no_git_repos": "No git repositories found.", - "confirm_claude_commit": "Have Claude commit these repos?", - "health.short": "Quick health check across all repos", - "health.long": "Shows a summary of repository health across all repos in the workspace.", - "health.flag.verbose": "Show detailed breakdown", - "health.repos": "repos", - "health.to_push": "to push", - "health.to_pull": "to pull", - "health.errors": "errors", - "health.more": "+{{.Count}} more", - "health.dirty_label": "Dirty:", - "health.ahead_label": "Ahead:", - "health.behind_label": "Behind:", - "health.errors_label": "Errors:", - "status.clean": "clean", - "commit.short": "Claude-assisted commits across repos", - "push.short": "Push commits across all repos", - "push.diverged": "branch has diverged from remote", - "push.diverged_help": "Some repos have diverged (local and remote have different commits).", - "push.uncommitted_changes_commit": "You have uncommitted changes. 
Commit with Claude first?", - "pull.short": "Pull updates across all repos", - "work.short": "Multi-repo git operations", - "work.use_commit_flag": "Use --commit to have Claude create commits", - "issues.short": "List open issues across all repos", - "reviews.short": "List PRs needing review across all repos", - "ci.short": "Check CI status across all repos", - "impact.short": "Show impact of changing a repo", - "impact.requires_registry": "impact analysis requires repos.yaml with dependency information", - "sync.short": "Synchronizes public service APIs with internal implementations", - "vm.short": "Dev environment commands", - "vm.not_installed": "dev environment not installed (run 'core dev install' first)", - "vm.not_running": "Dev environment is not running", - "file_sync.short": "Sync files across repos (agent-safe)", - "file_sync.long": "Safely sync files or directories across multiple repositories with automatic pull/commit/push. Designed for AI agents to avoid common git pitfalls.", - "file_sync.flag.to": "Target repos pattern (e.g., packages/core-*)", - "file_sync.flag.message": "Commit message for the sync", - "file_sync.flag.co_author": "Co-author for commit (e.g., 'Name ')", - "file_sync.flag.dry_run": "Show what would be done without making changes", - "file_sync.flag.push": "Push after committing", - "file_sync.source": "Source", - "file_sync.targets": "Targets", - "file_sync.summary": "Summary", - "file_sync.no_changes": "no changes", - "file_sync.dry_run_mode": "(dry run)", - "file_sync.error.source_not_found": "Source not found: {{.Path}}", - "file_sync.error.no_targets": "No target repos matched the pattern", - "file_sync.error.no_registry": "No repos.yaml found", - "apply.short": "Run command or script across repos (agent-safe)", - "apply.long": "Run a command or script across multiple repositories with optional commit and push. 
Designed for AI agents to safely apply changes at scale.", - "apply.flag.command": "Shell command to run in each repo", - "apply.flag.script": "Script file to run in each repo", - "apply.flag.repos": "Comma-separated list of repo names (default: all)", - "apply.flag.commit": "Commit changes after running", - "apply.flag.message": "Commit message (required with --commit)", - "apply.flag.co_author": "Co-author for commit", - "apply.flag.dry_run": "Show what would be done without making changes", - "apply.flag.push": "Push after committing", - "apply.flag.continue": "Continue on error instead of stopping", - "apply.action": "Action", - "apply.targets": "Targets", - "apply.summary": "Summary", - "apply.no_changes": "no changes", - "apply.dry_run_mode": "(dry run)", - "apply.error.no_command": "Either --command or --script is required", - "apply.error.both_command_script": "Cannot use both --command and --script", - "apply.error.commit_needs_message": "--commit requires --message", - "apply.error.script_not_found": "Script not found: {{.Path}}", - "apply.error.no_repos": "No repos found", - "apply.error.no_registry": "No repos.yaml found", - "apply.error.command_failed": "Command failed (use --continue to skip failures)" - }, - "docs": { - "short": "Documentation management", - "list.short": "List documentation across repos", - "sync.short": "Sync documentation to core-php/docs/packages/" - }, - "doctor": { - "short": "Check development environment", - "ready": "Doctor: Environment ready", - "no_repos_yaml": "No repos.yaml found (run from workspace directory)", - "install_missing": "Install missing tools:", - "install_macos": "brew install git gh php composer node pnpm docker", - "ssh_missing": "SSH key missing - run: ssh-keygen && gh ssh-key add" - }, - "go": { - "short": "Go development tools", - "test.short": "Run Go tests", - "cov.short": "Run tests with coverage report", - "fmt.short": "Format Go code", - "lint.short": "Run golangci-lint", - "install.short": 
"Install Go binary", - "mod.short": "Module management", - "work.short": "Workspace management" - }, - "php": { - "short": "Laravel/PHP development tools", - "dev.short": "Start Laravel development environment", - "dev.press_ctrl_c": "Press Ctrl+C to stop all services", - "test.short": "Run PHP tests (PHPUnit/Pest)", - "fmt.short": "Format PHP code with Laravel Pint", - "analyse.short": "Run PHPStan static analysis", - "audit.short": "Security audit for dependencies", - "psalm.short": "Run Psalm static analysis", - "rector.short": "Automated code refactoring", - "infection.short": "Mutation testing for test quality", - "security.short": "Security vulnerability scanning", - "qa.short": "Run full QA pipeline", - "build.short": "Build Docker or LinuxKit image", - "deploy.short": "Deploy to Coolify", - "serve.short": "Run production container", - "ssl.short": "Setup SSL certificates with mkcert", - "packages.short": "Manage local PHP packages", - "ci.short": "Run CI/CD pipeline with combined reporting", - "ci.long": "Run all QA checks in optimal order and generate combined reports in JSON, markdown, or SARIF format for CI/CD integration.", - "ci.flag.json": "Output combined JSON report", - "ci.flag.summary": "Output markdown summary (for PR comments)", - "ci.flag.sarif": "Generate SARIF files for static analysis", - "ci.flag.upload_sarif": "Upload SARIF to GitHub Security tab", - "ci.flag.fail_on": "Severity level to fail on (critical, high, warning)" - }, - "pkg": { - "short": "Package management for core-* repos", - "install.short": "Clone a package from GitHub", - "list.short": "List installed packages", - "update.short": "Update installed packages", - "outdated.short": "Check for outdated packages", - "search.short": "Search GitHub for packages", - "error.invalid_repo_format": "invalid repo format: use org/repo (e.g., host-uk/core-php)" - }, - "sdk": { - "short": "SDK validation and API compatibility tools", - "diff.short": "Check for breaking API changes", - 
"validate.short": "Validate OpenAPI spec" - }, - "setup": { - "short": "Bootstrap workspace or clone packages from registry", - "complete": "Setup complete", - "bootstrap_mode": "Bootstrap mode (no repos.yaml found)", - "nothing_to_clone": "Nothing to clone.", - "wizard.select_packages": "Select packages to clone", - "wizard.what_to_do": "What would you like to do?", - "github.short": "Configure GitHub repos with org standards", - "github.long": "Configure GitHub repositories with organisation standards including labels, webhooks, branch protection, and security settings.", - "github.flag.repo": "Specific repo to setup", - "github.flag.all": "Setup all repos in registry", - "github.flag.labels": "Only sync labels", - "github.flag.webhooks": "Only sync webhooks", - "github.flag.protection": "Only sync branch protection", - "github.flag.security": "Only sync security settings", - "github.flag.check": "Dry-run: show what would change", - "github.flag.config": "Path to github.yaml config", - "github.dry_run_mode": "(dry run) no changes will be made", - "github.no_repos_specified": "No repos specified.", - "github.usage_hint": "Use --repo for a single repo, or --all for all repos", - "github.no_changes": "no changes needed", - "github.repos_checked": "Repos checked", - "github.repos_with_changes": "Repos with changes", - "github.all_up_to_date": "All repos are up to date", - "github.to_create": "To create", - "github.to_update": "To update", - "github.to_delete": "To delete", - "github.run_without_check": "Run without --check to apply changes", - "github.error.not_authenticated": "GitHub CLI not authenticated. 
Run: gh auth login", - "github.error.config_not_found": "GitHub config file not found", - "github.error.conflicting_flags": "Cannot use --repo and --all together" - }, - "security": { - "short": "Security alerts and vulnerability scanning", - "long": "View security alerts from Dependabot, code scanning, and secret scanning across repositories.", - "alerts.short": "List all security alerts across repos", - "alerts.long": "List security alerts from Dependabot, code scanning, and secret scanning. Aggregates alerts across all repos in the registry.", - "deps.short": "List Dependabot vulnerability alerts", - "deps.long": "List vulnerable dependencies detected by Dependabot with upgrade recommendations.", - "deps.flag.vulnerable": "Show only vulnerable dependencies", - "scan.short": "List code scanning alerts", - "scan.long": "List code scanning alerts from tools like CodeQL, Semgrep, etc.", - "scan.flag.tool": "Filter by tool name (e.g., codeql, semgrep)", - "secrets.short": "List exposed secrets", - "secrets.long": "List secrets detected by GitHub secret scanning.", - "flag.repo": "Specific repo to check", - "flag.severity": "Filter by severity (critical,high,medium,low)" - }, - "qa": { - "short": "Quality assurance workflows", - "long": "Quality assurance commands for verifying work - CI status, reviews, issues.", - "watch.short": "Watch GitHub Actions after a push", - "watch.long": "Monitor GitHub Actions workflow runs triggered by a commit, showing live progress and actionable failure details.", - "watch.flag.repo": "Repository to watch (default: current)", - "watch.flag.commit": "Commit SHA to watch (default: HEAD)", - "watch.flag.timeout": "Timeout duration (default: 10m)", - "watch.commit": "Commit:", - "watch.waiting_for_workflows": "Waiting for workflows to start...", - "watch.timeout": "Timeout after {{.Duration}} waiting for workflows", - "watch.workflows_failed": "{{.Count}} workflow(s) failed", - "watch.all_passed": "All workflows passed", - 
"watch.error.not_git_repo": "Not in a git repository", - "watch.error.repo_format": "Invalid repo format. Use --repo org/name or run from a git repo", - "review.short": "Check PR review status", - "review.long": "Show PR review status with actionable next steps. Answers: What do I need to do to get my PRs merged? What reviews am I blocking?", - "review.flag.mine": "Show only your open PRs", - "review.flag.requested": "Show only PRs where your review is requested", - "review.flag.repo": "Specific repository (default: current)", - "review.your_prs": "Your PRs", - "review.review_requested": "Review Requested", - "review.no_prs": "No open PRs", - "review.no_reviews": "No reviews requested", - "review.error.no_repo": "Not in a git repository. Use --repo to specify one", - "health.short": "Aggregate CI health across all repos", - "health.long": "Shows CI health summary across all repos with focus on problems that need attention.", - "health.flag.problems": "Show only repos with problems", - "health.summary": "CI Health", - "health.all_healthy": "All repos are healthy", - "health.passing": "Passing", - "health.tests_failing": "Tests failing", - "health.running": "Running", - "health.cancelled": "Cancelled", - "health.skipped": "Skipped", - "health.no_ci_configured": "No CI configured", - "health.workflow_disabled": "Workflow disabled", - "health.fetch_error": "Failed to fetch status", - "health.parse_error": "Failed to parse response", - "health.count_passing": "Passing", - "health.count_failing": "Failing", - "health.count_pending": "Pending", - "health.count_no_ci": "No CI", - "health.count_disabled": "Disabled", - "issues.short": "Intelligent issue triage", - "issues.long": "Show prioritised, actionable issues across all repos. 
Groups by: needs response, ready to work, blocked, and needs triage.", - "issues.flag.mine": "Show only issues assigned to you", - "issues.flag.triage": "Show only issues needing triage", - "issues.flag.blocked": "Show only blocked issues", - "issues.flag.limit": "Maximum issues per repo", - "issues.fetching": "Fetching...", - "issues.no_issues": "No open issues found", - "issues.category.needs_response": "Needs Response", - "issues.category.ready": "Ready to Work", - "issues.category.blocked": "Blocked", - "issues.category.triage": "Needs Triage", - "issues.hint.needs_response": "commented recently", - "issues.hint.blocked": "Waiting on dependency", - "issues.hint.triage": "Add labels and assignee" - }, - "test": { - "short": "Run Go tests with coverage" - }, - "vm": { - "short": "LinuxKit VM management", - "run.short": "Run a LinuxKit image or template", - "ps.short": "List running VMs", - "stop.short": "Stop a running VM", - "logs.short": "View VM logs", - "exec.short": "Execute a command in a VM", - "templates.short": "Manage LinuxKit templates" - }, - "monitor": { - "short": "Aggregate security findings from GitHub", - "long": "Monitor GitHub Security Tab, Dependabot, and secret scanning for actionable findings. Aggregates results from free tier scanners (Semgrep, Trivy, Gitleaks, OSV-Scanner, Checkov, CodeQL).", - "flag.repo": "Specific repository to scan", - "flag.severity": "Filter by severity (critical, high, medium, low)", - "flag.json": "Output as JSON for piping to other tools", - "flag.all": "Scan all repos in registry", - "scanning": "Scanning", - "found": "Found", - "no_findings": "No security findings", - "error.no_repos": "No repositories to scan. Use --repo, --all, or run from a git repo", - "error.not_git_repo": "Not in a git repository. 
Use --repo to specify one" - } - }, - "common": { - "status": { - "dirty": "dirty", - "clean": "clean", - "synced": "synced", - "up_to_date": "up to date" - }, - "label": { - "done": "Done", - "error": "Error", - "warning": "Warning", - "info": "Info", - "fix": "Fix:", - "install": "Install:", - "summary": "Summary:", - "setup": "Setup:", - "config": "Config:", - "repo": "Repo:" - }, - "flag": { - "fix": "Auto-fix issues where possible", - "diff": "Show diff of changes", - "json": "Output as JSON", - "verbose": "Show detailed output", - "registry": "Path to repos.yaml registry file" - }, - "progress": { - "running": "Running {{.Task}}...", - "checking": "Checking {{.Item}}..." - }, - "result": { - "no_issues": "No issues found" - }, - "success": { - "completed": "{{.Action}} successfully" - }, - "error": { - "failed": "Failed to {{.Action}}" - }, - "hint": { - "fix_deps": "Update dependencies to fix vulnerabilities" - }, - "count": { - "succeeded": "{{.Count}} succeeded", - "failed": "{{.Count}} failed", - "skipped": "{{.Count}} skipped" - } - }, - "error": { - "gh_not_found": "'gh' CLI not found. 
Install from https://cli.github.com/", - "registry_not_found": "No repos.yaml found", - "repo_not_found": "Repository '{{.Name}}' not found" - }, - "lang": { - "de": "German", - "en": "English", - "es": "Spanish", - "fr": "French", - "zh": "Chinese" - } -} diff --git a/pkg/i18n/locales/en_US.json b/pkg/i18n/locales/en_US.json deleted file mode 100644 index 04e4683..0000000 --- a/pkg/i18n/locales/en_US.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "gram": { - "verb": { - "analyse": { "base": "analyze", "past": "analyzed", "gerund": "analyzing" }, - "organise": { "base": "organize", "past": "organized", "gerund": "organizing" }, - "recognise": { "base": "recognize", "past": "recognized", "gerund": "recognizing" }, - "realise": { "base": "realize", "past": "realized", "gerund": "realizing" } - } - } -} diff --git a/pkg/i18n/localise.go b/pkg/i18n/localise.go deleted file mode 100644 index d82d293..0000000 --- a/pkg/i18n/localise.go +++ /dev/null @@ -1,66 +0,0 @@ -// Package i18n provides internationalization for the CLI. -package i18n - -import ( - "os" - "strings" - - "golang.org/x/text/language" -) - -// SetFormality sets the default formality level on the default service. -// Does nothing if the service is not initialized. -// -// SetFormality(FormalityFormal) // Use formal address (Sie, vous) -func SetFormality(f Formality) { - if svc := Default(); svc != nil { - svc.SetFormality(f) - } -} - -// Direction returns the text direction for the current language. -func Direction() TextDirection { - if svc := Default(); svc != nil { - return svc.Direction() - } - return DirLTR -} - -// IsRTL returns true if the current language uses right-to-left text. 
-func IsRTL() bool { - return Direction() == DirRTL -} - -func detectLanguage(supported []language.Tag) string { - langEnv := os.Getenv("LANG") - if langEnv == "" { - langEnv = os.Getenv("LC_ALL") - if langEnv == "" { - langEnv = os.Getenv("LC_MESSAGES") - } - } - if langEnv == "" { - return "" - } - - // Parse LANG format: en_GB.UTF-8 -> en-GB - baseLang := strings.Split(langEnv, ".")[0] - baseLang = strings.ReplaceAll(baseLang, "_", "-") - - parsedLang, err := language.Parse(baseLang) - if err != nil { - return "" - } - - if len(supported) == 0 { - return "" - } - - matcher := language.NewMatcher(supported) - bestMatch, _, confidence := matcher.Match(parsedLang) - - if confidence >= language.Low { - return bestMatch.String() - } - return "" -} diff --git a/pkg/i18n/mode_test.go b/pkg/i18n/mode_test.go deleted file mode 100644 index a57f4d1..0000000 --- a/pkg/i18n/mode_test.go +++ /dev/null @@ -1,161 +0,0 @@ -package i18n - -import ( - "sync" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestMode_String(t *testing.T) { - tests := []struct { - mode Mode - expected string - }{ - {ModeNormal, "normal"}, - {ModeStrict, "strict"}, - {ModeCollect, "collect"}, - {Mode(99), "unknown"}, - } - - for _, tt := range tests { - t.Run(tt.expected, func(t *testing.T) { - assert.Equal(t, tt.expected, tt.mode.String()) - }) - } -} - -func TestMissingKey(t *testing.T) { - mk := MissingKey{ - Key: "test.missing.key", - Args: map[string]any{"Name": "test"}, - CallerFile: "/path/to/file.go", - CallerLine: 42, - } - - assert.Equal(t, "test.missing.key", mk.Key) - assert.Equal(t, "test", mk.Args["Name"]) - assert.Equal(t, "/path/to/file.go", mk.CallerFile) - assert.Equal(t, 42, mk.CallerLine) -} - -func TestOnMissingKey(t *testing.T) { - // Reset handler after test - defer OnMissingKey(nil) - - t.Run("sets handler", func(t *testing.T) { - var received MissingKey - OnMissingKey(func(mk MissingKey) { - received = mk - }) - - 
dispatchMissingKey("test.key", map[string]any{"foo": "bar"}) - - assert.Equal(t, "test.key", received.Key) - assert.Equal(t, "bar", received.Args["foo"]) - }) - - t.Run("nil handler", func(t *testing.T) { - OnMissingKey(nil) - // Should not panic - dispatchMissingKey("test.key", nil) - }) - - t.Run("replaces previous handler", func(t *testing.T) { - called1 := false - called2 := false - - OnMissingKey(func(mk MissingKey) { - called1 = true - }) - OnMissingKey(func(mk MissingKey) { - called2 = true - }) - - dispatchMissingKey("test.key", nil) - - assert.False(t, called1) - assert.True(t, called2) - }) -} - -func TestServiceMode(t *testing.T) { - // Reset default service after tests - originalService := defaultService.Load() - defer func() { - defaultService.Store(originalService) - }() - - t.Run("default mode is normal", func(t *testing.T) { - defaultService.Store(nil) - defaultOnce = sync.Once{} - defaultErr = nil - - svc, err := New() - require.NoError(t, err) - - assert.Equal(t, ModeNormal, svc.Mode()) - }) - - t.Run("set mode", func(t *testing.T) { - svc, err := New() - require.NoError(t, err) - - svc.SetMode(ModeStrict) - assert.Equal(t, ModeStrict, svc.Mode()) - - svc.SetMode(ModeCollect) - assert.Equal(t, ModeCollect, svc.Mode()) - - svc.SetMode(ModeNormal) - assert.Equal(t, ModeNormal, svc.Mode()) - }) -} - -func TestModeNormal_MissingKey(t *testing.T) { - svc, err := New() - require.NoError(t, err) - - svc.SetMode(ModeNormal) - - // Missing key should return the key itself - result := svc.T("nonexistent.key") - assert.Equal(t, "nonexistent.key", result) -} - -func TestModeStrict_MissingKey(t *testing.T) { - svc, err := New() - require.NoError(t, err) - - svc.SetMode(ModeStrict) - - // Missing key should panic - assert.Panics(t, func() { - svc.T("nonexistent.key") - }) -} - -func TestModeCollect_MissingKey(t *testing.T) { - // Reset handler after test - defer OnMissingKey(nil) - - svc, err := New() - require.NoError(t, err) - - svc.SetMode(ModeCollect) - - 
var received MissingKey - OnMissingKey(func(mk MissingKey) { - received = mk - }) - - // Missing key should dispatch action and return [key] - result := svc.T("nonexistent.key", map[string]any{"arg": "value"}) - - assert.Equal(t, "[nonexistent.key]", result) - assert.Equal(t, "nonexistent.key", received.Key) - assert.Equal(t, "value", received.Args["arg"]) - assert.NotEmpty(t, received.CallerFile) - assert.Greater(t, received.CallerLine, 0) -} diff --git a/pkg/i18n/numbers.go b/pkg/i18n/numbers.go deleted file mode 100644 index a35baf5..0000000 --- a/pkg/i18n/numbers.go +++ /dev/null @@ -1,223 +0,0 @@ -// Package i18n provides internationalization for the CLI. -package i18n - -import ( - "fmt" - "math" - "strconv" - "strings" -) - -// getNumberFormat returns the number format for the current language. -func getNumberFormat() NumberFormat { - lang := currentLangForGrammar() - // Extract base language (en-GB → en) - if idx := strings.IndexAny(lang, "-_"); idx > 0 { - lang = lang[:idx] - } - if fmt, ok := numberFormats[lang]; ok { - return fmt - } - return numberFormats["en"] // fallback -} - -// FormatNumber formats an integer with locale-specific thousands separators. -// -// FormatNumber(1234567) // "1,234,567" (en) or "1.234.567" (de) -func FormatNumber(n int64) string { - nf := getNumberFormat() - return formatIntWithSep(n, nf.ThousandsSep) -} - -// FormatDecimal formats a float with locale-specific separators. -// Uses up to 2 decimal places, trimming trailing zeros. -// -// FormatDecimal(1234.5) // "1,234.5" (en) or "1.234,5" (de) -// FormatDecimal(1234.00) // "1,234" (en) or "1.234" (de) -func FormatDecimal(f float64) string { - return FormatDecimalN(f, 2) -} - -// FormatDecimalN formats a float with N decimal places. 
-// -// FormatDecimalN(1234.5678, 3) // "1,234.568" (en) -func FormatDecimalN(f float64, decimals int) string { - nf := getNumberFormat() - - // Split into integer and fractional parts - intPart := int64(f) - fracPart := math.Abs(f - float64(intPart)) - - // Format integer part with thousands separator - intStr := formatIntWithSep(intPart, nf.ThousandsSep) - - // Format fractional part - if decimals <= 0 || fracPart == 0 { - return intStr - } - - // Round and format fractional part - multiplier := math.Pow(10, float64(decimals)) - fracInt := int64(math.Round(fracPart * multiplier)) - - if fracInt == 0 { - return intStr - } - - // Format with leading zeros, then trim trailing zeros - fracStr := fmt.Sprintf("%0*d", decimals, fracInt) - fracStr = strings.TrimRight(fracStr, "0") - - return intStr + nf.DecimalSep + fracStr -} - -// FormatPercent formats a decimal as a percentage. -// -// FormatPercent(0.85) // "85%" (en) or "85 %" (de) -// FormatPercent(0.333) // "33.3%" (en) -// FormatPercent(1.5) // "150%" (en) -func FormatPercent(f float64) string { - nf := getNumberFormat() - pct := f * 100 - - // Format the number part - var numStr string - if pct == float64(int64(pct)) { - numStr = strconv.FormatInt(int64(pct), 10) - } else { - numStr = FormatDecimalN(pct, 1) - } - - return fmt.Sprintf(nf.PercentFmt, numStr) -} - -// FormatBytes formats bytes as human-readable size. 
-// -// FormatBytes(1536) // "1.5 KB" -// FormatBytes(1536000) // "1.5 MB" -// FormatBytes(1536000000) // "1.4 GB" -func FormatBytes(bytes int64) string { - const ( - KB = 1024 - MB = KB * 1024 - GB = MB * 1024 - TB = GB * 1024 - ) - - nf := getNumberFormat() - - var value float64 - var unit string - - switch { - case bytes >= TB: - value = float64(bytes) / TB - unit = "TB" - case bytes >= GB: - value = float64(bytes) / GB - unit = "GB" - case bytes >= MB: - value = float64(bytes) / MB - unit = "MB" - case bytes >= KB: - value = float64(bytes) / KB - unit = "KB" - default: - return fmt.Sprintf("%d B", bytes) - } - - // Format with 1 decimal place, trim .0 - intPart := int64(value) - fracPart := value - float64(intPart) - - if fracPart < 0.05 { - return fmt.Sprintf("%d %s", intPart, unit) - } - - fracDigit := int(math.Round(fracPart * 10)) - if fracDigit == 10 { - return fmt.Sprintf("%d %s", intPart+1, unit) - } - - return fmt.Sprintf("%d%s%d %s", intPart, nf.DecimalSep, fracDigit, unit) -} - -// FormatOrdinal formats a number as an ordinal. -// -// FormatOrdinal(1) // "1st" (en) or "1." (de) -// FormatOrdinal(2) // "2nd" (en) or "2." (de) -// FormatOrdinal(3) // "3rd" (en) or "3." (de) -// FormatOrdinal(11) // "11th" (en) or "11." (de) -func FormatOrdinal(n int) string { - lang := currentLangForGrammar() - // Extract base language - if idx := strings.IndexAny(lang, "-_"); idx > 0 { - lang = lang[:idx] - } - - // Most languages just use number + period - switch lang { - case "en": - return formatEnglishOrdinal(n) - default: - return fmt.Sprintf("%d.", n) - } -} - -// formatEnglishOrdinal returns English ordinal suffix. 
-func formatEnglishOrdinal(n int) string { - abs := n - if abs < 0 { - abs = -abs - } - - // Special cases for 11, 12, 13 - if abs%100 >= 11 && abs%100 <= 13 { - return fmt.Sprintf("%dth", n) - } - - switch abs % 10 { - case 1: - return fmt.Sprintf("%dst", n) - case 2: - return fmt.Sprintf("%dnd", n) - case 3: - return fmt.Sprintf("%drd", n) - default: - return fmt.Sprintf("%dth", n) - } -} - -// formatIntWithSep formats an integer with thousands separator. -func formatIntWithSep(n int64, sep string) string { - if sep == "" { - return strconv.FormatInt(n, 10) - } - - negative := n < 0 - if negative { - n = -n - } - - str := strconv.FormatInt(n, 10) - if len(str) <= 3 { - if negative { - return "-" + str - } - return str - } - - // Insert separators from right to left - var result strings.Builder - for i, c := range str { - if i > 0 && (len(str)-i)%3 == 0 { - result.WriteString(sep) - } - result.WriteRune(c) - } - - if negative { - return "-" + result.String() - } - return result.String() -} diff --git a/pkg/i18n/numbers_test.go b/pkg/i18n/numbers_test.go deleted file mode 100644 index 4f2e6b3..0000000 --- a/pkg/i18n/numbers_test.go +++ /dev/null @@ -1,173 +0,0 @@ -package i18n - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestFormatNumber(t *testing.T) { - svc, err := New() - require.NoError(t, err) - SetDefault(svc) - - tests := []struct { - name string - input int64 - expected string - }{ - {"zero", 0, "0"}, - {"small", 123, "123"}, - {"thousands", 1234, "1,234"}, - {"millions", 1234567, "1,234,567"}, - {"negative", -1234567, "-1,234,567"}, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := FormatNumber(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestFormatDecimal(t *testing.T) { - svc, err := New() - require.NoError(t, err) - SetDefault(svc) - - tests := []struct { - name string - input float64 - expected string - }{ - {"integer", 
1234.0, "1,234"}, - {"one decimal", 1234.5, "1,234.5"}, - {"two decimals", 1234.56, "1,234.56"}, - {"trailing zeros", 1234.50, "1,234.5"}, - {"small", 0.5, "0.5"}, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := FormatDecimal(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestFormatPercent(t *testing.T) { - svc, err := New() - require.NoError(t, err) - SetDefault(svc) - - tests := []struct { - name string - input float64 - expected string - }{ - {"whole", 0.85, "85%"}, - {"decimal", 0.333, "33.3%"}, - {"over 100", 1.5, "150%"}, - {"zero", 0.0, "0%"}, - {"one", 1.0, "100%"}, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := FormatPercent(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestFormatBytes(t *testing.T) { - svc, err := New() - require.NoError(t, err) - SetDefault(svc) - - tests := []struct { - name string - input int64 - expected string - }{ - {"bytes", 500, "500 B"}, - {"KB", 1536, "1.5 KB"}, - {"MB", 1572864, "1.5 MB"}, - {"GB", 1610612736, "1.5 GB"}, - {"exact KB", 1024, "1 KB"}, - {"exact MB", 1048576, "1 MB"}, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := FormatBytes(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestFormatOrdinal(t *testing.T) { - svc, err := New() - require.NoError(t, err) - SetDefault(svc) - - tests := []struct { - name string - input int - expected string - }{ - {"1st", 1, "1st"}, - {"2nd", 2, "2nd"}, - {"3rd", 3, "3rd"}, - {"4th", 4, "4th"}, - {"11th", 11, "11th"}, - {"12th", 12, "12th"}, - {"13th", 13, "13th"}, - {"21st", 21, "21st"}, - {"22nd", 22, "22nd"}, - {"23rd", 23, "23rd"}, - {"100th", 100, "100th"}, - {"101st", 101, "101st"}, - {"111th", 111, "111th"}, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := FormatOrdinal(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestI18nNumberNamespace(t 
*testing.T) { - svc, err := New() - require.NoError(t, err) - SetDefault(svc) - - t.Run("i18n.numeric.number", func(t *testing.T) { - result := svc.T("i18n.numeric.number", 1234567) - assert.Equal(t, "1,234,567", result) - }) - - t.Run("i18n.numeric.decimal", func(t *testing.T) { - result := svc.T("i18n.numeric.decimal", 1234.56) - assert.Equal(t, "1,234.56", result) - }) - - t.Run("i18n.numeric.percent", func(t *testing.T) { - result := svc.T("i18n.numeric.percent", 0.85) - assert.Equal(t, "85%", result) - }) - - t.Run("i18n.numeric.bytes", func(t *testing.T) { - result := svc.T("i18n.numeric.bytes", 1572864) - assert.Equal(t, "1.5 MB", result) - }) - - t.Run("i18n.numeric.ordinal", func(t *testing.T) { - result := svc.T("i18n.numeric.ordinal", 3) - assert.Equal(t, "3rd", result) - }) -} diff --git a/pkg/i18n/service.go b/pkg/i18n/service.go deleted file mode 100644 index 91d1181..0000000 --- a/pkg/i18n/service.go +++ /dev/null @@ -1,635 +0,0 @@ -// Package i18n provides internationalization for the CLI. -package i18n - -import ( - "embed" - "encoding/json" - "fmt" - "io/fs" - "path" - "strings" - "sync" - "sync/atomic" - - "golang.org/x/text/language" -) - -// Service provides internationalization and localization. -type Service struct { - loader Loader // Source for loading translations - messages map[string]map[string]Message // lang -> key -> message - currentLang string - fallbackLang string - availableLangs []language.Tag - mode Mode // Translation mode (Normal, Strict, Collect) - debug bool // Debug mode shows key prefixes - formality Formality // Default formality level for translations - handlers []KeyHandler // Handler chain for dynamic key patterns - mu sync.RWMutex -} - -// Option configures a Service during construction. -type Option func(*Service) - -// WithFallback sets the fallback language for missing translations. 
-func WithFallback(lang string) Option { - return func(s *Service) { - s.fallbackLang = lang - } -} - -// WithFormality sets the default formality level. -func WithFormality(f Formality) Option { - return func(s *Service) { - s.formality = f - } -} - -// WithHandlers sets custom handlers (replaces default handlers). -func WithHandlers(handlers ...KeyHandler) Option { - return func(s *Service) { - s.handlers = handlers - } -} - -// WithDefaultHandlers adds the default i18n.* namespace handlers. -// Use this after WithHandlers to add defaults back, or to ensure defaults are present. -func WithDefaultHandlers() Option { - return func(s *Service) { - s.handlers = append(s.handlers, DefaultHandlers()...) - } -} - -// WithMode sets the translation mode. -func WithMode(m Mode) Option { - return func(s *Service) { - s.mode = m - } -} - -// WithDebug enables or disables debug mode. -func WithDebug(enabled bool) Option { - return func(s *Service) { - s.debug = enabled - } -} - -// Default is the global i18n service instance. -var ( - defaultService atomic.Pointer[Service] - defaultOnce sync.Once - defaultErr error -) - -//go:embed locales/*.json -var localeFS embed.FS - -// Ensure Service implements Translator at compile time. -var _ Translator = (*Service)(nil) - -// New creates a new i18n service with embedded locales and default options. -func New(opts ...Option) (*Service, error) { - return NewWithLoader(NewFSLoader(localeFS, "locales"), opts...) -} - -// NewWithFS creates a new i18n service loading locales from the given filesystem. -func NewWithFS(fsys fs.FS, dir string, opts ...Option) (*Service, error) { - return NewWithLoader(NewFSLoader(fsys, dir), opts...) -} - -// NewWithLoader creates a new i18n service with a custom loader. -// Use this for custom storage backends (database, remote API, etc.). 
-// -// loader := NewFSLoader(customFS, "translations") -// svc, err := NewWithLoader(loader, WithFallback("de-DE")) -func NewWithLoader(loader Loader, opts ...Option) (*Service, error) { - s := &Service{ - loader: loader, - messages: make(map[string]map[string]Message), - fallbackLang: "en-GB", - handlers: DefaultHandlers(), - } - - // Apply options - for _, opt := range opts { - opt(s) - } - - // Load all available languages - langs := loader.Languages() - if len(langs) == 0 { - return nil, fmt.Errorf("no languages available from loader") - } - - for _, lang := range langs { - messages, grammar, err := loader.Load(lang) - if err != nil { - return nil, fmt.Errorf("failed to load locale %q: %w", lang, err) - } - - s.messages[lang] = messages - if grammar != nil && (len(grammar.Verbs) > 0 || len(grammar.Nouns) > 0 || len(grammar.Words) > 0) { - SetGrammarData(lang, grammar) - } - - tag := language.Make(lang) - s.availableLangs = append(s.availableLangs, tag) - } - - // Try to detect system language - if detected := detectLanguage(s.availableLangs); detected != "" { - s.currentLang = detected - } else { - s.currentLang = s.fallbackLang - } - - return s, nil -} - -// Init initializes the default global service. -func Init() error { - defaultOnce.Do(func() { - svc, err := New() - if err == nil { - defaultService.Store(svc) - // Load any locales registered by packages before Init was called - loadRegisteredLocales(svc) - } - defaultErr = err - }) - return defaultErr -} - -// Default returns the global i18n service, initializing if needed. -// Thread-safe: can be called concurrently. -func Default() *Service { - _ = Init() // sync.Once handles idempotency - return defaultService.Load() -} - -// SetDefault sets the global i18n service. -// Thread-safe: can be called concurrently with Default(). -// Panics if s is nil. 
func SetDefault(s *Service) {
	if s == nil {
		panic("i18n: SetDefault called with nil service")
	}
	defaultService.Store(s)
}

// loadJSON parses nested JSON and flattens to dot-notation keys.
// Also extracts grammar data (verbs, nouns, articles) for the language.
// If messages already exist for the language, new messages are merged in
// (new keys overwrite existing ones).
func (s *Service) loadJSON(lang string, data []byte) error {
	var raw map[string]any
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}

	messages := make(map[string]Message)
	grammarData := &GrammarData{
		Verbs: make(map[string]VerbForms),
		Nouns: make(map[string]NounForms),
		Words: make(map[string]string),
	}

	flattenWithGrammar("", raw, messages, grammarData)

	// Merge new messages into existing (or create new map)
	if existing, ok := s.messages[lang]; ok {
		for key, msg := range messages {
			existing[key] = msg
		}
	} else {
		s.messages[lang] = messages
	}

	// Store grammar data if any was found
	if len(grammarData.Verbs) > 0 || len(grammarData.Nouns) > 0 || len(grammarData.Words) > 0 {
		SetGrammarData(lang, grammarData)
	}

	return nil
}

// SetLanguage sets the language for translations.
// The requested tag is matched against available languages, so e.g. "en"
// may resolve to "en-GB"; an error is returned only when the tag is
// invalid, no languages are loaded, or no match exists at all.
func (s *Service) SetLanguage(lang string) error {
	s.mu.Lock()
	defer s.mu.Unlock()

	requestedLang, err := language.Parse(lang)
	if err != nil {
		return fmt.Errorf("invalid language tag %q: %w", lang, err)
	}

	if len(s.availableLangs) == 0 {
		return fmt.Errorf("no languages available")
	}

	matcher := language.NewMatcher(s.availableLangs)
	bestMatch, _, confidence := matcher.Match(requestedLang)

	if confidence == language.No {
		return fmt.Errorf("unsupported language: %q", lang)
	}

	s.currentLang = bestMatch.String()
	return nil
}

// Language returns the current language code.
func (s *Service) Language() string {
	s.mu.RLock()
	defer s.mu.RUnlock()
	return s.currentLang
}

// AvailableLanguages returns the list of available language codes.
-func (s *Service) AvailableLanguages() []string { - s.mu.RLock() - defer s.mu.RUnlock() - - langs := make([]string, len(s.availableLangs)) - for i, tag := range s.availableLangs { - langs[i] = tag.String() - } - return langs -} - -// SetMode sets the translation mode for missing key handling. -func (s *Service) SetMode(m Mode) { - s.mu.Lock() - defer s.mu.Unlock() - s.mode = m -} - -// Mode returns the current translation mode. -func (s *Service) Mode() Mode { - s.mu.RLock() - defer s.mu.RUnlock() - return s.mode -} - -// SetFormality sets the default formality level for translations. -// This affects languages that distinguish formal/informal address (Sie/du, vous/tu). -// -// svc.SetFormality(FormalityFormal) // Use formal address -func (s *Service) SetFormality(f Formality) { - s.mu.Lock() - defer s.mu.Unlock() - s.formality = f -} - -// Formality returns the current formality level. -func (s *Service) Formality() Formality { - s.mu.RLock() - defer s.mu.RUnlock() - return s.formality -} - -// Direction returns the text direction for the current language. -func (s *Service) Direction() TextDirection { - s.mu.RLock() - defer s.mu.RUnlock() - if IsRTLLanguage(s.currentLang) { - return DirRTL - } - return DirLTR -} - -// IsRTL returns true if the current language uses right-to-left text direction. -func (s *Service) IsRTL() bool { - return s.Direction() == DirRTL -} - -// PluralCategory returns the plural category for a count in the current language. -func (s *Service) PluralCategory(n int) PluralCategory { - s.mu.RLock() - defer s.mu.RUnlock() - return GetPluralCategory(s.currentLang, n) -} - -// AddHandler appends a handler to the end of the handler chain. -// Later handlers have lower priority (run if earlier handlers don't match). -// -// Note: Handlers are executed during T() while holding a read lock. -// Handlers should not call back into the same Service instance to avoid -// contention. 
Grammar functions like PastTense() use currentLangForGrammar() -// which safely calls Default().Language(). -func (s *Service) AddHandler(h KeyHandler) { - s.mu.Lock() - defer s.mu.Unlock() - s.handlers = append(s.handlers, h) -} - -// PrependHandler inserts a handler at the start of the handler chain. -// Prepended handlers have highest priority (run first). -func (s *Service) PrependHandler(h KeyHandler) { - s.mu.Lock() - defer s.mu.Unlock() - s.handlers = append([]KeyHandler{h}, s.handlers...) -} - -// ClearHandlers removes all handlers from the chain. -// Useful for testing or disabling all i18n.* magic. -func (s *Service) ClearHandlers() { - s.mu.Lock() - defer s.mu.Unlock() - s.handlers = nil -} - -// Handlers returns a copy of the current handler chain. -func (s *Service) Handlers() []KeyHandler { - s.mu.RLock() - defer s.mu.RUnlock() - result := make([]KeyHandler, len(s.handlers)) - copy(result, s.handlers) - return result -} - -// T translates a message by its ID with handler chain support. -// -// # i18n Namespace Magic -// -// The i18n.* namespace provides auto-composed grammar shortcuts: -// -// T("i18n.label.status") // → "Status:" -// T("i18n.progress.build") // → "Building..." -// T("i18n.progress.check", "config") // → "Checking config..." -// T("i18n.count.file", 5) // → "5 files" -// T("i18n.done.delete", "file") // → "File deleted" -// T("i18n.fail.delete", "file") // → "Failed to delete file" -// -// For semantic intents, pass a Subject: -// -// T("core.delete", S("file", "config.yaml")) // → "Delete config.yaml?" -// -// Use Raw() for direct key lookup without handler chain processing. 
func (s *Service) T(messageID string, args ...any) string {
	s.mu.RLock()
	defer s.mu.RUnlock()

	// Run handler chain - handlers can intercept and process keys.
	// The closure is the terminal fallback when no handler claims the key.
	result := RunHandlerChain(s.handlers, messageID, args, func() string {
		// Fallback: standard message lookup
		var data any
		if len(args) > 0 {
			data = args[0]
		}
		text := s.resolveWithFallback(messageID, data)
		if text == "" {
			return s.handleMissingKey(messageID, args)
		}
		return text
	})

	// Debug mode: prefix with key
	if s.debug {
		return debugFormat(messageID, result)
	}

	return result
}

// resolveWithFallback implements the fallback chain for message resolution:
// current language → fallback language → common.action.{verb} → common.{verb}.
// Returns "" when nothing matches.
// Must be called with s.mu.RLock held.
func (s *Service) resolveWithFallback(messageID string, data any) string {
	// 1. Try exact key in current language
	if text := s.tryResolve(s.currentLang, messageID, data); text != "" {
		return text
	}

	// 2. Try exact key in fallback language
	if text := s.tryResolve(s.fallbackLang, messageID, data); text != "" {
		return text
	}

	// 3. Try fallback patterns for intent-like keys, using the last
	// dot-separated segment as the verb.
	if strings.Contains(messageID, ".") {
		parts := strings.Split(messageID, ".")
		verb := parts[len(parts)-1]

		// Try common.action.{verb}
		commonKey := "common.action." + verb
		if text := s.tryResolve(s.currentLang, commonKey, data); text != "" {
			return text
		}
		if text := s.tryResolve(s.fallbackLang, commonKey, data); text != "" {
			return text
		}

		// Try common.{verb}
		commonKey = "common." + verb
		if text := s.tryResolve(s.currentLang, commonKey, data); text != "" {
			return text
		}
		if text := s.tryResolve(s.fallbackLang, commonKey, data); text != "" {
			return text
		}
	}

	return ""
}

// tryResolve attempts to resolve a single key in a single language.
// Returns empty string if not found.
// Must be called with s.mu.RLock held.
func (s *Service) tryResolve(lang, key string, data any) string {
	// Determine effective formality
	formality := s.getEffectiveFormality(data)

	// Try formality-specific key first (key._formal or key._informal)
	if formality != FormalityNeutral {
		formalityKey := key + "._" + formality.String()
		if text := s.resolveMessage(lang, formalityKey, data); text != "" {
			return text
		}
	}

	// Fall back to base key
	return s.resolveMessage(lang, key, data)
}

// resolveMessage resolves a single message key without formality fallback.
// Selects the plural form when the message has plural variants, then applies
// template interpolation when data is present. Returns "" when not found.
// Must be called with s.mu.RLock held.
func (s *Service) resolveMessage(lang, key string, data any) string {
	msg, ok := s.getMessage(lang, key)
	if !ok {
		return ""
	}

	text := msg.Text
	if msg.IsPlural() {
		count := getCount(data)
		category := GetPluralCategory(lang, count)
		text = msg.ForCategory(category)
	}

	if text == "" {
		return ""
	}

	// Apply template if we have data
	if data != nil {
		text = applyTemplate(text, data)
	}

	return text
}

// getEffectiveFormality returns the formality to use for translation.
// Priority: TranslationContext > Subject > map["Formality"] > Service.formality
// Must be called with s.mu.RLock held.
func (s *Service) getEffectiveFormality(data any) Formality {
	// Check if data is a TranslationContext with explicit formality
	if ctx, ok := data.(*TranslationContext); ok && ctx != nil {
		if ctx.Formality != FormalityNeutral {
			return ctx.Formality
		}
	}

	// Check if data is a Subject with explicit formality
	if subj, ok := data.(*Subject); ok && subj != nil {
		if subj.formality != FormalityNeutral {
			return subj.formality
		}
	}

	// Check if data is a map with Formality field
	if m, ok := data.(map[string]any); ok {
		switch f := m["Formality"].(type) {
		case Formality:
			if f != FormalityNeutral {
				return f
			}
		case string:
			// Support string values for convenience
			switch strings.ToLower(f) {
			case "formal":
				return FormalityFormal
			case "informal":
				return FormalityInformal
			}
		}
	}

	// Fall back to service default
	return s.formality
}

// handleMissingKey handles a missing translation key based on the current mode.
// Must be called with s.mu.RLock held.
//
// In ModeStrict, this panics - use only in development/CI to catch missing keys.
// In ModeCollect, this dispatches to OnMissingKey handler for logging/collection.
// In ModeNormal (default), this returns the key as-is.
func (s *Service) handleMissingKey(key string, args []any) string {
	switch s.mode {
	case ModeStrict:
		// WARNING: Panics! Use ModeStrict only in development/CI environments.
		panic(fmt.Sprintf("i18n: missing translation key %q", key))
	case ModeCollect:
		// Convert args to map for the action; non-map args are dropped.
		var argsMap map[string]any
		if len(args) > 0 {
			if m, ok := args[0].(map[string]any); ok {
				argsMap = m
			}
		}
		dispatchMissingKey(key, argsMap)
		return "[" + key + "]"
	default:
		return key
	}
}

// Raw is the raw translation helper without i18n.* namespace magic.
// Use T() for smart i18n.* handling, Raw() for direct key lookup.
-func (s *Service) Raw(messageID string, args ...any) string { - s.mu.RLock() - defer s.mu.RUnlock() - - var data any - if len(args) > 0 { - data = args[0] - } - - text := s.resolveWithFallback(messageID, data) - if text == "" { - return s.handleMissingKey(messageID, args) - } - - if s.debug { - return debugFormat(messageID, text) - } - return text -} - -// getMessage retrieves a message by language and key. -// Returns the message and true if found, or empty Message and false if not. -func (s *Service) getMessage(lang, key string) (Message, bool) { - msgs, ok := s.messages[lang] - if !ok { - return Message{}, false - } - msg, ok := msgs[key] - return msg, ok -} - -// AddMessages adds messages for a language at runtime. -func (s *Service) AddMessages(lang string, messages map[string]string) { - s.mu.Lock() - defer s.mu.Unlock() - - if s.messages[lang] == nil { - s.messages[lang] = make(map[string]Message) - } - for key, text := range messages { - s.messages[lang][key] = Message{Text: text} - } -} - -// LoadFS loads additional locale files from a filesystem. 
-func (s *Service) LoadFS(fsys fs.FS, dir string) error { - s.mu.Lock() - defer s.mu.Unlock() - - entries, err := fs.ReadDir(fsys, dir) - if err != nil { - return fmt.Errorf("failed to read locales directory: %w", err) - } - - for _, entry := range entries { - if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".json") { - continue - } - - filePath := path.Join(dir, entry.Name()) // Use path.Join for fs.FS (forward slashes) - data, err := fs.ReadFile(fsys, filePath) - if err != nil { - return fmt.Errorf("failed to read locale %q: %w", entry.Name(), err) - } - - lang := strings.TrimSuffix(entry.Name(), ".json") - lang = strings.ReplaceAll(lang, "_", "-") - - if err := s.loadJSON(lang, data); err != nil { - return fmt.Errorf("failed to parse locale %q: %w", entry.Name(), err) - } - - // Add to available languages if new - tag := language.Make(lang) - found := false - for _, existing := range s.availableLangs { - if existing == tag { - found = true - break - } - } - if !found { - s.availableLangs = append(s.availableLangs, tag) - } - } - - return nil -} diff --git a/pkg/i18n/time.go b/pkg/i18n/time.go deleted file mode 100644 index 6bececf..0000000 --- a/pkg/i18n/time.go +++ /dev/null @@ -1,55 +0,0 @@ -// Package i18n provides internationalization for the CLI. -package i18n - -import ( - "fmt" - "time" -) - -// TimeAgo returns a localized relative time string. 
-// -// TimeAgo(time.Now().Add(-5 * time.Minute)) // "5 minutes ago" -// TimeAgo(time.Now().Add(-1 * time.Hour)) // "1 hour ago" -func TimeAgo(t time.Time) string { - duration := time.Since(t) - - switch { - case duration < time.Minute: - return T("time.just_now") - case duration < time.Hour: - mins := int(duration.Minutes()) - return FormatAgo(mins, "minute") - case duration < 24*time.Hour: - hours := int(duration.Hours()) - return FormatAgo(hours, "hour") - case duration < 7*24*time.Hour: - days := int(duration.Hours() / 24) - return FormatAgo(days, "day") - default: - weeks := int(duration.Hours() / (24 * 7)) - return FormatAgo(weeks, "week") - } -} - -// FormatAgo formats "N unit ago" with proper pluralization. -// Uses locale-specific patterns from time.ago.{unit}. -// -// FormatAgo(5, "minute") // "5 minutes ago" -// FormatAgo(1, "hour") // "1 hour ago" -func FormatAgo(count int, unit string) string { - svc := Default() - if svc == nil { - return fmt.Sprintf("%d %ss ago", count, unit) - } - - // Try locale-specific pattern: time.ago.{unit} - key := "time.ago." 
+ unit - result := svc.T(key, map[string]any{"Count": count}) - - // If key was returned as-is (not found), compose fallback - if result == key { - return fmt.Sprintf("%d %s ago", count, Pluralize(unit, count)) - } - - return result -} diff --git a/pkg/i18n/time_test.go b/pkg/i18n/time_test.go deleted file mode 100644 index 41f426c..0000000 --- a/pkg/i18n/time_test.go +++ /dev/null @@ -1,85 +0,0 @@ -package i18n - -import ( - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestFormatAgo(t *testing.T) { - svc, err := New() - require.NoError(t, err) - SetDefault(svc) - - tests := []struct { - name string - count int - unit string - expected string - }{ - {"1 second", 1, "second", "1 second ago"}, - {"5 seconds", 5, "second", "5 seconds ago"}, - {"1 minute", 1, "minute", "1 minute ago"}, - {"30 minutes", 30, "minute", "30 minutes ago"}, - {"1 hour", 1, "hour", "1 hour ago"}, - {"3 hours", 3, "hour", "3 hours ago"}, - {"1 day", 1, "day", "1 day ago"}, - {"7 days", 7, "day", "7 days ago"}, - {"1 week", 1, "week", "1 week ago"}, - {"2 weeks", 2, "week", "2 weeks ago"}, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := FormatAgo(tt.count, tt.unit) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestTimeAgo(t *testing.T) { - svc, err := New() - require.NoError(t, err) - SetDefault(svc) - - tests := []struct { - name string - ago time.Duration - expected string - }{ - {"just now", 30 * time.Second, "just now"}, - {"1 minute", 1 * time.Minute, "1 minute ago"}, - {"5 minutes", 5 * time.Minute, "5 minutes ago"}, - {"1 hour", 1 * time.Hour, "1 hour ago"}, - {"3 hours", 3 * time.Hour, "3 hours ago"}, - {"1 day", 24 * time.Hour, "1 day ago"}, - {"3 days", 3 * 24 * time.Hour, "3 days ago"}, - {"1 week", 7 * 24 * time.Hour, "1 week ago"}, - {"2 weeks", 14 * 24 * time.Hour, "2 weeks ago"}, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := 
TimeAgo(time.Now().Add(-tt.ago)) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestI18nAgoNamespace(t *testing.T) { - svc, err := New() - require.NoError(t, err) - SetDefault(svc) - - t.Run("i18n.numeric.ago pattern", func(t *testing.T) { - result := T("i18n.numeric.ago", 5, "minute") - assert.Equal(t, "5 minutes ago", result) - }) - - t.Run("i18n.numeric.ago singular", func(t *testing.T) { - result := T("i18n.numeric.ago", 1, "hour") - assert.Equal(t, "1 hour ago", result) - }) -} diff --git a/pkg/i18n/transform.go b/pkg/i18n/transform.go deleted file mode 100644 index 3421db2..0000000 --- a/pkg/i18n/transform.go +++ /dev/null @@ -1,122 +0,0 @@ -// Package i18n provides internationalization for the CLI. -package i18n - -// getCount extracts a Count value from template data. -func getCount(data any) int { - if data == nil { - return 0 - } - switch d := data.(type) { - case map[string]any: - if c, ok := d["Count"]; ok { - return toInt(c) - } - case map[string]int: - if c, ok := d["Count"]; ok { - return c - } - } - return 0 -} - -// toInt converts any numeric type to int. -func toInt(v any) int { - if v == nil { - return 0 - } - switch n := v.(type) { - case int: - return n - case int64: - return int(n) - case int32: - return int(n) - case int16: - return int(n) - case int8: - return int(n) - case uint: - return int(n) - case uint64: - return int(n) - case uint32: - return int(n) - case uint16: - return int(n) - case uint8: - return int(n) - case float64: - return int(n) - case float32: - return int(n) - } - return 0 -} - -// toInt64 converts any numeric type to int64. 
// toInt64 converts any numeric type to int64. Non-numeric values yield 0;
// floats are truncated toward zero.
func toInt64(value any) int64 {
	if value == nil {
		return 0
	}
	switch n := value.(type) {
	case int64:
		return n
	case int:
		return int64(n)
	case int32:
		return int64(n)
	case int16:
		return int64(n)
	case int8:
		return int64(n)
	case uint:
		return int64(n)
	case uint64:
		return int64(n)
	case uint32:
		return int64(n)
	case uint16:
		return int64(n)
	case uint8:
		return int64(n)
	case float64:
		return int64(n)
	case float32:
		return int64(n)
	}
	return 0
}

// toFloat64 converts any numeric type to float64. Non-numeric values yield 0.
func toFloat64(value any) float64 {
	if value == nil {
		return 0
	}
	switch n := value.(type) {
	case float64:
		return n
	case float32:
		return float64(n)
	case int:
		return float64(n)
	case int64:
		return float64(n)
	case int32:
		return float64(n)
	case int16:
		return float64(n)
	case int8:
		return float64(n)
	case uint:
		return float64(n)
	case uint64:
		return float64(n)
	case uint32:
		return float64(n)
	case uint16:
		return float64(n)
	case uint8:
		return float64(n)
	}
	return 0
}

// --- pkg/i18n/types.go ---
// Package i18n provides internationalization for the CLI.
// (file header: package i18n; import "sync")

// --- Core Types ---

// Mode determines how the i18n service handles missing translation keys.
type Mode int

const (
	// ModeNormal returns the key as-is when a translation is missing (production).
	ModeNormal Mode = iota
	// ModeStrict panics immediately when a translation is missing (dev/CI).
	ModeStrict
	// ModeCollect dispatches MissingKey actions and returns [key] (QA testing).
	ModeCollect
)

// String returns the string representation of the Mode.
func (m Mode) String() string {
	names := [...]string{ModeNormal: "normal", ModeStrict: "strict", ModeCollect: "collect"}
	if m >= 0 && int(m) < len(names) {
		return names[m]
	}
	return "unknown"
}

// Formality represents the level of formality in translations.
// Used for languages that distinguish formal/informal address (Sie/du, vous/tu).
type Formality int

const (
	// FormalityNeutral uses context-appropriate formality (default)
	FormalityNeutral Formality = iota
	// FormalityInformal uses informal address (du, tu, you)
	FormalityInformal
	// FormalityFormal uses formal address (Sie, vous, usted)
	FormalityFormal
)

// TextDirection represents text directionality.
type TextDirection int

const (
	// DirLTR is left-to-right text direction (English, German, etc.)
	DirLTR TextDirection = iota
	// DirRTL is right-to-left text direction (Arabic, Hebrew, etc.)
	DirRTL
)

// PluralCategory represents CLDR plural categories.
// Different languages use different subsets of these categories.
type PluralCategory int

const (
	// PluralOther is the default/fallback category
	PluralOther PluralCategory = iota
	// PluralZero is used when count == 0 (Arabic, Latvian, etc.)
	PluralZero
	// PluralOne is used when count == 1 (most languages)
	PluralOne
	// PluralTwo is used when count == 2 (Arabic, Welsh, etc.)
	PluralTwo
	// PluralFew is used for small numbers (Slavic: 2-4, Arabic: 3-10, etc.)
	PluralFew
	// PluralMany is used for larger numbers (Slavic: 5+, Arabic: 11-99, etc.)
	PluralMany
)

// GrammaticalGender represents grammatical gender for nouns.
type GrammaticalGender int

const (
	// GenderNeuter is used for neuter nouns (das in German, it in English)
	GenderNeuter GrammaticalGender = iota
	// GenderMasculine is used for masculine nouns (der in German, le in French)
	GenderMasculine
	// GenderFeminine is used for feminine nouns (die in German, la in French)
	GenderFeminine
	// GenderCommon is used in languages with common gender (Swedish, Dutch)
	GenderCommon
)

// --- Message Types ---

// Message represents a translation - either a simple string or plural forms.
// Supports full CLDR plural categories for languages with complex plural rules.
type Message struct {
	Text  string // Simple string value (non-plural)
	Zero  string // count == 0 (Arabic, Latvian, Welsh)
	One   string // count == 1 (most languages)
	Two   string // count == 2 (Arabic, Welsh)
	Few   string // Small numbers (Slavic: 2-4, Arabic: 3-10)
	Many  string // Larger numbers (Slavic: 5+, Arabic: 11-99)
	Other string // Default/fallback form
}

// ForCategory returns the appropriate text for a plural category.
// Falls back through the category hierarchy to find a non-empty string:
// requested category → Other → One → Text.
func (m Message) ForCategory(cat PluralCategory) string {
	var primary string
	switch cat {
	case PluralZero:
		primary = m.Zero
	case PluralOne:
		primary = m.One
	case PluralTwo:
		primary = m.Two
	case PluralFew:
		primary = m.Few
	case PluralMany:
		primary = m.Many
	}
	for _, candidate := range []string{primary, m.Other, m.One} {
		if candidate != "" {
			return candidate
		}
	}
	return m.Text
}

// IsPlural returns true if this message has any plural forms.
func (m Message) IsPlural() bool {
	for _, form := range []string{m.Zero, m.One, m.Two, m.Few, m.Many, m.Other} {
		if form != "" {
			return true
		}
	}
	return false
}

// --- Subject Types ---

// Subject represents a typed subject with metadata for semantic translations.
// Use S() to create a Subject and chain methods for additional context.
type Subject struct {
	Noun      string    // The noun type (e.g., "file", "repo", "user")
	Value     any       // The actual value (e.g., filename, struct, etc.)
	count     int       // Count for pluralization (default 1)
	gender    string    // Grammatical gender for languages that need it
	location  string    // Location context (e.g., "in workspace")
	formality Formality // Formality level override
}

// --- Intent Types ---

// IntentMeta defines the behaviour and characteristics of an intent.
type IntentMeta struct {
	Type      string   // "action", "question", "info"
	Verb      string   // Reference to verb key (e.g., "delete", "save")
	Dangerous bool     // If true, requires extra confirmation
	Default   string   // Default response: "yes" or "no"
	Supports  []string // Extra options supported by this intent
}

// Composed holds all output forms for an intent after template resolution.
type Composed struct {
	Question string     // Question form: "Delete config.yaml?"
	Confirm  string     // Confirmation form: "Really delete config.yaml?"
	Success  string     // Success message: "config.yaml deleted"
	Failure  string     // Failure message: "Failed to delete config.yaml"
	Meta     IntentMeta // Intent metadata for UI decisions
}

// Intent defines a semantic intent with templates for all output forms.
type Intent struct {
	Meta     IntentMeta // Intent behaviour and characteristics
	Question string     // Template for question form
	Confirm  string     // Template for confirmation form
	Success  string     // Template for success message
	Failure  string     // Template for failure message
}

// templateData is passed to intent templates during execution.
type templateData struct {
	Subject   string    // Display value of subject
	Noun      string    // Noun type
	Count     int       // Count for pluralization
	Gender    string    // Grammatical gender
	Location  string    // Location context
	Formality Formality // Formality level
	IsFormal  bool      // Convenience: formality == FormalityFormal
	IsPlural  bool      // Convenience: count != 1
	Value     any       // Raw value (for complex templates)
}

// --- Grammar Types ---

// GrammarData holds language-specific grammar forms loaded from JSON.
type GrammarData struct {
	Verbs    map[string]VerbForms // verb -> forms
	Nouns    map[string]NounForms // noun -> forms
	Articles ArticleForms         // article configuration
	Words    map[string]string    // base word translations
	Punct    PunctuationRules     // language-specific punctuation
}

// VerbForms holds irregular verb conjugations.
type VerbForms struct {
	Past   string // Past tense (e.g., "deleted")
	Gerund string // Present participle (e.g., "deleting")
}

// NounForms holds plural and gender information for a noun.
type NounForms struct {
	One    string // Singular form
	Other  string // Plural form
	Gender string // Grammatical gender (masculine, feminine, neuter, common)
}

// ArticleForms holds article configuration for a language.
type ArticleForms struct {
	IndefiniteDefault string            // Default indefinite article (e.g., "a")
	IndefiniteVowel   string            // Indefinite article before vowel sounds (e.g., "an")
	Definite          string            // Definite article (e.g., "the")
	ByGender          map[string]string // Gender-specific articles for gendered languages
}

// PunctuationRules holds language-specific punctuation patterns.
type PunctuationRules struct {
	LabelSuffix    string // Suffix for labels (default ":")
	ProgressSuffix string // Suffix for progress (default "...")
}

// --- Number Formatting ---

// NumberFormat defines locale-specific number formatting rules.
type NumberFormat struct {
	ThousandsSep string // "," for en, "." for de
	DecimalSep   string // "." for en, "," for de
	PercentFmt   string // "%s%%" for en, "%s %%" for de (space before %)
}

// --- Function Types ---

// PluralRule is a function that determines the plural category for a count.
type PluralRule func(n int) PluralCategory

// MissingKeyHandler receives missing key events for analysis.
type MissingKeyHandler func(missing MissingKey)

// MissingKey is dispatched when a translation key is not found in ModeCollect.
type MissingKey struct {
	Key        string         // The missing translation key
	Args       map[string]any // Arguments passed to the translation
	CallerFile string         // Source file where T() was called
	CallerLine int            // Line number where T() was called
}

// --- Interfaces ---

// KeyHandler processes translation keys before standard lookup.
// Handlers form a chain; each can handle a key or delegate to the next handler.
// Use this to implement dynamic key patterns like i18n.label.*, i18n.progress.*, etc.
type KeyHandler interface {
	// Match returns true if this handler should process the key.
	Match(key string) bool

	// Handle processes the key and returns the result.
	// Call next() to delegate to the next handler in the chain.
	Handle(key string, args []any, next func() string) string
}

// Loader provides translation data to the Service.
// Implement this interface to support custom storage backends (database, remote API, etc.).
type Loader interface {
	// Load returns messages and grammar data for a language.
	// Returns an error if the language cannot be loaded.
	Load(lang string) (map[string]Message, *GrammarData, error)

	// Languages returns all available language codes.
	Languages() []string
}

// Translator defines the interface for translation services.
type Translator interface {
	T(messageID string, args ...any) string
	SetLanguage(lang string) error
	Language() string
	SetMode(m Mode)
	Mode() Mode
	SetDebug(enabled bool)
	Debug() bool
	SetFormality(f Formality)
	Formality() Formality
	Direction() TextDirection
	IsRTL() bool
	PluralCategory(n int) PluralCategory
	AvailableLanguages() []string
}

// --- Package Variables ---

// grammarCache holds loaded grammar data per language,
// guarded by grammarCacheMu for concurrent access.
var (
	grammarCache   = make(map[string]*GrammarData)
	grammarCacheMu sync.RWMutex
)

// templateCache stores compiled templates for reuse.
var templateCache sync.Map

// numberFormats contains default number formats by language.
var numberFormats = map[string]NumberFormat{
	"en": {ThousandsSep: ",", DecimalSep: ".", PercentFmt: "%s%%"},
	"de": {ThousandsSep: ".", DecimalSep: ",", PercentFmt: "%s %%"},
	"fr": {ThousandsSep: " ", DecimalSep: ",", PercentFmt: "%s %%"},
	"es": {ThousandsSep: ".", DecimalSep: ",", PercentFmt: "%s%%"},
	"zh": {ThousandsSep: ",", DecimalSep: ".", PercentFmt: "%s%%"},
}

// rtlLanguages contains language codes that use right-to-left text direction.
var rtlLanguages = map[string]bool{
	"ar": true, "ar-SA": true, "ar-EG": true,
	"he": true, "he-IL": true,
	"fa": true, "fa-IR": true,
	"ur": true, "ur-PK": true,
	"yi": true, "ps": true, "sd": true, "ug": true,
}

// pluralRules contains CLDR plural rules for supported languages.
-var pluralRules = map[string]PluralRule{ - "en": pluralRuleEnglish, "en-GB": pluralRuleEnglish, "en-US": pluralRuleEnglish, - "de": pluralRuleGerman, "de-DE": pluralRuleGerman, "de-AT": pluralRuleGerman, "de-CH": pluralRuleGerman, - "fr": pluralRuleFrench, "fr-FR": pluralRuleFrench, "fr-CA": pluralRuleFrench, - "es": pluralRuleSpanish, "es-ES": pluralRuleSpanish, "es-MX": pluralRuleSpanish, - "ru": pluralRuleRussian, "ru-RU": pluralRuleRussian, - "pl": pluralRulePolish, "pl-PL": pluralRulePolish, - "ar": pluralRuleArabic, "ar-SA": pluralRuleArabic, - "zh": pluralRuleChinese, "zh-CN": pluralRuleChinese, "zh-TW": pluralRuleChinese, - "ja": pluralRuleJapanese, "ja-JP": pluralRuleJapanese, - "ko": pluralRuleKorean, "ko-KR": pluralRuleKorean, -} - -// --- Irregular Forms --- - -// irregularVerbs maps base verbs to their irregular forms. -var irregularVerbs = map[string]VerbForms{ - "be": {Past: "was", Gerund: "being"}, "have": {Past: "had", Gerund: "having"}, - "do": {Past: "did", Gerund: "doing"}, "go": {Past: "went", Gerund: "going"}, - "make": {Past: "made", Gerund: "making"}, "get": {Past: "got", Gerund: "getting"}, - "run": {Past: "ran", Gerund: "running"}, "set": {Past: "set", Gerund: "setting"}, - "put": {Past: "put", Gerund: "putting"}, "cut": {Past: "cut", Gerund: "cutting"}, - "let": {Past: "let", Gerund: "letting"}, "hit": {Past: "hit", Gerund: "hitting"}, - "shut": {Past: "shut", Gerund: "shutting"}, "split": {Past: "split", Gerund: "splitting"}, - "spread": {Past: "spread", Gerund: "spreading"}, "read": {Past: "read", Gerund: "reading"}, - "write": {Past: "wrote", Gerund: "writing"}, "send": {Past: "sent", Gerund: "sending"}, - "build": {Past: "built", Gerund: "building"}, "begin": {Past: "began", Gerund: "beginning"}, - "find": {Past: "found", Gerund: "finding"}, "take": {Past: "took", Gerund: "taking"}, - "see": {Past: "saw", Gerund: "seeing"}, "keep": {Past: "kept", Gerund: "keeping"}, - "hold": {Past: "held", Gerund: "holding"}, "tell": {Past: "told", 
Gerund: "telling"}, - "bring": {Past: "brought", Gerund: "bringing"}, "think": {Past: "thought", Gerund: "thinking"}, - "buy": {Past: "bought", Gerund: "buying"}, "catch": {Past: "caught", Gerund: "catching"}, - "teach": {Past: "taught", Gerund: "teaching"}, "throw": {Past: "threw", Gerund: "throwing"}, - "grow": {Past: "grew", Gerund: "growing"}, "know": {Past: "knew", Gerund: "knowing"}, - "show": {Past: "showed", Gerund: "showing"}, "draw": {Past: "drew", Gerund: "drawing"}, - "break": {Past: "broke", Gerund: "breaking"}, "speak": {Past: "spoke", Gerund: "speaking"}, - "choose": {Past: "chose", Gerund: "choosing"}, "forget": {Past: "forgot", Gerund: "forgetting"}, - "lose": {Past: "lost", Gerund: "losing"}, "win": {Past: "won", Gerund: "winning"}, - "swim": {Past: "swam", Gerund: "swimming"}, "drive": {Past: "drove", Gerund: "driving"}, - "rise": {Past: "rose", Gerund: "rising"}, "shine": {Past: "shone", Gerund: "shining"}, - "sing": {Past: "sang", Gerund: "singing"}, "ring": {Past: "rang", Gerund: "ringing"}, - "drink": {Past: "drank", Gerund: "drinking"}, "sink": {Past: "sank", Gerund: "sinking"}, - "sit": {Past: "sat", Gerund: "sitting"}, "stand": {Past: "stood", Gerund: "standing"}, - "hang": {Past: "hung", Gerund: "hanging"}, "dig": {Past: "dug", Gerund: "digging"}, - "stick": {Past: "stuck", Gerund: "sticking"}, "bite": {Past: "bit", Gerund: "biting"}, - "hide": {Past: "hid", Gerund: "hiding"}, "feed": {Past: "fed", Gerund: "feeding"}, - "meet": {Past: "met", Gerund: "meeting"}, "lead": {Past: "led", Gerund: "leading"}, - "sleep": {Past: "slept", Gerund: "sleeping"}, "feel": {Past: "felt", Gerund: "feeling"}, - "leave": {Past: "left", Gerund: "leaving"}, "mean": {Past: "meant", Gerund: "meaning"}, - "lend": {Past: "lent", Gerund: "lending"}, "spend": {Past: "spent", Gerund: "spending"}, - "bend": {Past: "bent", Gerund: "bending"}, "deal": {Past: "dealt", Gerund: "dealing"}, - "lay": {Past: "laid", Gerund: "laying"}, "pay": {Past: "paid", Gerund: "paying"}, 
- "say": {Past: "said", Gerund: "saying"}, "sell": {Past: "sold", Gerund: "selling"}, - "seek": {Past: "sought", Gerund: "seeking"}, "fight": {Past: "fought", Gerund: "fighting"}, - "fly": {Past: "flew", Gerund: "flying"}, "wear": {Past: "wore", Gerund: "wearing"}, - "tear": {Past: "tore", Gerund: "tearing"}, "bear": {Past: "bore", Gerund: "bearing"}, - "swear": {Past: "swore", Gerund: "swearing"}, "wake": {Past: "woke", Gerund: "waking"}, - "freeze": {Past: "froze", Gerund: "freezing"}, "steal": {Past: "stole", Gerund: "stealing"}, - "overwrite": {Past: "overwritten", Gerund: "overwriting"}, "reset": {Past: "reset", Gerund: "resetting"}, - "reboot": {Past: "rebooted", Gerund: "rebooting"}, - // Multi-syllable verbs with stressed final syllables (double consonant) - "submit": {Past: "submitted", Gerund: "submitting"}, "permit": {Past: "permitted", Gerund: "permitting"}, - "admit": {Past: "admitted", Gerund: "admitting"}, "omit": {Past: "omitted", Gerund: "omitting"}, - "commit": {Past: "committed", Gerund: "committing"}, "transmit": {Past: "transmitted", Gerund: "transmitting"}, - "prefer": {Past: "preferred", Gerund: "preferring"}, "refer": {Past: "referred", Gerund: "referring"}, - "transfer": {Past: "transferred", Gerund: "transferring"}, "defer": {Past: "deferred", Gerund: "deferring"}, - "confer": {Past: "conferred", Gerund: "conferring"}, "infer": {Past: "inferred", Gerund: "inferring"}, - "occur": {Past: "occurred", Gerund: "occurring"}, "recur": {Past: "recurred", Gerund: "recurring"}, - "incur": {Past: "incurred", Gerund: "incurring"}, "deter": {Past: "deterred", Gerund: "deterring"}, - "control": {Past: "controlled", Gerund: "controlling"}, "patrol": {Past: "patrolled", Gerund: "patrolling"}, - "compel": {Past: "compelled", Gerund: "compelling"}, "expel": {Past: "expelled", Gerund: "expelling"}, - "propel": {Past: "propelled", Gerund: "propelling"}, "repel": {Past: "repelled", Gerund: "repelling"}, - "rebel": {Past: "rebelled", Gerund: "rebelling"}, 
"excel": {Past: "excelled", Gerund: "excelling"}, - "cancel": {Past: "cancelled", Gerund: "cancelling"}, "travel": {Past: "travelled", Gerund: "travelling"}, - "label": {Past: "labelled", Gerund: "labelling"}, "model": {Past: "modelled", Gerund: "modelling"}, - "level": {Past: "levelled", Gerund: "levelling"}, -} - -// noDoubleConsonant contains multi-syllable verbs that don't double the final consonant. -var noDoubleConsonant = map[string]bool{ - "open": true, "listen": true, "happen": true, "enter": true, "offer": true, - "suffer": true, "differ": true, "cover": true, "deliver": true, "develop": true, - "visit": true, "limit": true, "edit": true, "credit": true, "orbit": true, - "total": true, "target": true, "budget": true, "market": true, "benefit": true, "focus": true, -} - -// irregularNouns maps singular nouns to their irregular plural forms. -var irregularNouns = map[string]string{ - "child": "children", "person": "people", "man": "men", "woman": "women", - "foot": "feet", "tooth": "teeth", "mouse": "mice", "goose": "geese", - "ox": "oxen", "index": "indices", "appendix": "appendices", "matrix": "matrices", - "vertex": "vertices", "crisis": "crises", "analysis": "analyses", "diagnosis": "diagnoses", - "thesis": "theses", "hypothesis": "hypotheses", "parenthesis": "parentheses", - "datum": "data", "medium": "media", "bacterium": "bacteria", "criterion": "criteria", - "phenomenon": "phenomena", "curriculum": "curricula", "alumnus": "alumni", - "cactus": "cacti", "focus": "foci", "fungus": "fungi", "nucleus": "nuclei", - "radius": "radii", "stimulus": "stimuli", "syllabus": "syllabi", - "fish": "fish", "sheep": "sheep", "deer": "deer", "species": "species", - "series": "series", "aircraft": "aircraft", - "life": "lives", "wife": "wives", "knife": "knives", "leaf": "leaves", - "half": "halves", "self": "selves", "shelf": "shelves", "wolf": "wolves", - "calf": "calves", "loaf": "loaves", "thief": "thieves", -} - -// vowelSounds contains words that start with 
consonants but have vowel sounds. -var vowelSounds = map[string]bool{ - "hour": true, "honest": true, "honour": true, "honor": true, "heir": true, "herb": true, -} - -// consonantSounds contains words that start with vowels but have consonant sounds. -var consonantSounds = map[string]bool{ - "user": true, "union": true, "unique": true, "unit": true, "universe": true, - "university": true, "uniform": true, "usage": true, "usual": true, "utility": true, - "utensil": true, "one": true, "once": true, "euro": true, "eulogy": true, "euphemism": true, -} diff --git a/pkg/log/log.go b/pkg/log/log.go deleted file mode 100644 index d308cfc..0000000 --- a/pkg/log/log.go +++ /dev/null @@ -1,213 +0,0 @@ -// Package log provides structured logging for Core applications. -// -// The package works standalone or integrated with the Core framework: -// -// // Standalone usage -// log.SetLevel(log.LevelDebug) -// log.Info("server started", "port", 8080) -// log.Error("failed to connect", "err", err) -// -// // With Core framework -// core.New( -// framework.WithName("log", log.NewService(log.Options{Level: log.LevelInfo})), -// ) -package log - -import ( - "fmt" - "io" - "os" - "sync" - "time" -) - -// Level defines logging verbosity. -type Level int - -const ( - LevelQuiet Level = iota - LevelError - LevelWarn - LevelInfo - LevelDebug -) - -// String returns the level name. -func (l Level) String() string { - switch l { - case LevelQuiet: - return "quiet" - case LevelError: - return "error" - case LevelWarn: - return "warn" - case LevelInfo: - return "info" - case LevelDebug: - return "debug" - default: - return "unknown" - } -} - -// Logger provides structured logging. 
-type Logger struct { - mu sync.RWMutex - level Level - output io.Writer - - // Style functions for formatting (can be overridden) - StyleTimestamp func(string) string - StyleDebug func(string) string - StyleInfo func(string) string - StyleWarn func(string) string - StyleError func(string) string -} - -// Options configures a Logger. -type Options struct { - Level Level - Output io.Writer // defaults to os.Stderr -} - -// New creates a new Logger with the given options. -func New(opts Options) *Logger { - output := opts.Output - if output == nil { - output = os.Stderr - } - - return &Logger{ - level: opts.Level, - output: output, - StyleTimestamp: identity, - StyleDebug: identity, - StyleInfo: identity, - StyleWarn: identity, - StyleError: identity, - } -} - -func identity(s string) string { return s } - -// SetLevel changes the log level. -func (l *Logger) SetLevel(level Level) { - l.mu.Lock() - l.level = level - l.mu.Unlock() -} - -// Level returns the current log level. -func (l *Logger) Level() Level { - l.mu.RLock() - defer l.mu.RUnlock() - return l.level -} - -// SetOutput changes the output writer. -func (l *Logger) SetOutput(w io.Writer) { - l.mu.Lock() - l.output = w - l.mu.Unlock() -} - -func (l *Logger) shouldLog(level Level) bool { - l.mu.RLock() - defer l.mu.RUnlock() - return level <= l.level -} - -func (l *Logger) log(level Level, prefix, msg string, keyvals ...any) { - l.mu.RLock() - output := l.output - styleTimestamp := l.StyleTimestamp - l.mu.RUnlock() - - timestamp := styleTimestamp(time.Now().Format("15:04:05")) - - // Format key-value pairs - var kvStr string - if len(keyvals) > 0 { - kvStr = " " - for i := 0; i < len(keyvals); i += 2 { - if i > 0 { - kvStr += " " - } - key := keyvals[i] - var val any - if i+1 < len(keyvals) { - val = keyvals[i+1] - } - kvStr += fmt.Sprintf("%v=%v", key, val) - } - } - - fmt.Fprintf(output, "%s %s %s%s\n", timestamp, prefix, msg, kvStr) -} - -// Debug logs a debug message with optional key-value pairs. 
-func (l *Logger) Debug(msg string, keyvals ...any) { - if l.shouldLog(LevelDebug) { - l.log(LevelDebug, l.StyleDebug("[DBG]"), msg, keyvals...) - } -} - -// Info logs an info message with optional key-value pairs. -func (l *Logger) Info(msg string, keyvals ...any) { - if l.shouldLog(LevelInfo) { - l.log(LevelInfo, l.StyleInfo("[INF]"), msg, keyvals...) - } -} - -// Warn logs a warning message with optional key-value pairs. -func (l *Logger) Warn(msg string, keyvals ...any) { - if l.shouldLog(LevelWarn) { - l.log(LevelWarn, l.StyleWarn("[WRN]"), msg, keyvals...) - } -} - -// Error logs an error message with optional key-value pairs. -func (l *Logger) Error(msg string, keyvals ...any) { - if l.shouldLog(LevelError) { - l.log(LevelError, l.StyleError("[ERR]"), msg, keyvals...) - } -} - -// --- Default logger --- - -var defaultLogger = New(Options{Level: LevelInfo}) - -// Default returns the default logger. -func Default() *Logger { - return defaultLogger -} - -// SetDefault sets the default logger. -func SetDefault(l *Logger) { - defaultLogger = l -} - -// SetLevel sets the default logger's level. -func SetLevel(level Level) { - defaultLogger.SetLevel(level) -} - -// Debug logs to the default logger. -func Debug(msg string, keyvals ...any) { - defaultLogger.Debug(msg, keyvals...) -} - -// Info logs to the default logger. -func Info(msg string, keyvals ...any) { - defaultLogger.Info(msg, keyvals...) -} - -// Warn logs to the default logger. -func Warn(msg string, keyvals ...any) { - defaultLogger.Warn(msg, keyvals...) -} - -// Error logs to the default logger. -func Error(msg string, keyvals ...any) { - defaultLogger.Error(msg, keyvals...) 
-} diff --git a/pkg/log/log_test.go b/pkg/log/log_test.go deleted file mode 100644 index 6721e39..0000000 --- a/pkg/log/log_test.go +++ /dev/null @@ -1,124 +0,0 @@ -package log - -import ( - "bytes" - "strings" - "testing" -) - -func TestLogger_Levels(t *testing.T) { - tests := []struct { - name string - level Level - logFunc func(*Logger, string, ...any) - expected bool - }{ - {"debug at debug", LevelDebug, (*Logger).Debug, true}, - {"info at debug", LevelDebug, (*Logger).Info, true}, - {"warn at debug", LevelDebug, (*Logger).Warn, true}, - {"error at debug", LevelDebug, (*Logger).Error, true}, - - {"debug at info", LevelInfo, (*Logger).Debug, false}, - {"info at info", LevelInfo, (*Logger).Info, true}, - {"warn at info", LevelInfo, (*Logger).Warn, true}, - {"error at info", LevelInfo, (*Logger).Error, true}, - - {"debug at warn", LevelWarn, (*Logger).Debug, false}, - {"info at warn", LevelWarn, (*Logger).Info, false}, - {"warn at warn", LevelWarn, (*Logger).Warn, true}, - {"error at warn", LevelWarn, (*Logger).Error, true}, - - {"debug at error", LevelError, (*Logger).Debug, false}, - {"info at error", LevelError, (*Logger).Info, false}, - {"warn at error", LevelError, (*Logger).Warn, false}, - {"error at error", LevelError, (*Logger).Error, true}, - - {"debug at quiet", LevelQuiet, (*Logger).Debug, false}, - {"info at quiet", LevelQuiet, (*Logger).Info, false}, - {"warn at quiet", LevelQuiet, (*Logger).Warn, false}, - {"error at quiet", LevelQuiet, (*Logger).Error, false}, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - var buf bytes.Buffer - l := New(Options{Level: tt.level, Output: &buf}) - tt.logFunc(l, "test message") - - hasOutput := buf.Len() > 0 - if hasOutput != tt.expected { - t.Errorf("expected output=%v, got output=%v", tt.expected, hasOutput) - } - }) - } -} - -func TestLogger_KeyValues(t *testing.T) { - var buf bytes.Buffer - l := New(Options{Level: LevelDebug, Output: &buf}) - - l.Info("test message", "key1", "value1", 
"key2", 42) - - output := buf.String() - if !strings.Contains(output, "test message") { - t.Error("expected message in output") - } - if !strings.Contains(output, "key1=value1") { - t.Error("expected key1=value1 in output") - } - if !strings.Contains(output, "key2=42") { - t.Error("expected key2=42 in output") - } -} - -func TestLogger_SetLevel(t *testing.T) { - l := New(Options{Level: LevelInfo}) - - if l.Level() != LevelInfo { - t.Error("expected initial level to be Info") - } - - l.SetLevel(LevelDebug) - if l.Level() != LevelDebug { - t.Error("expected level to be Debug after SetLevel") - } -} - -func TestLevel_String(t *testing.T) { - tests := []struct { - level Level - expected string - }{ - {LevelQuiet, "quiet"}, - {LevelError, "error"}, - {LevelWarn, "warn"}, - {LevelInfo, "info"}, - {LevelDebug, "debug"}, - {Level(99), "unknown"}, - } - - for _, tt := range tests { - t.Run(tt.expected, func(t *testing.T) { - if got := tt.level.String(); got != tt.expected { - t.Errorf("expected %q, got %q", tt.expected, got) - } - }) - } -} - -func TestDefault(t *testing.T) { - // Default logger should exist - if Default() == nil { - t.Error("expected default logger to exist") - } - - // Package-level functions should work - var buf bytes.Buffer - l := New(Options{Level: LevelDebug, Output: &buf}) - SetDefault(l) - - Info("test") - if buf.Len() == 0 { - t.Error("expected package-level Info to produce output") - } -} diff --git a/pkg/log/service.go b/pkg/log/service.go deleted file mode 100644 index ec2103d..0000000 --- a/pkg/log/service.go +++ /dev/null @@ -1,57 +0,0 @@ -package log - -import ( - "context" - - "github.com/host-uk/core/pkg/framework" -) - -// Service wraps Logger for Core framework integration. -type Service struct { - *framework.ServiceRuntime[Options] - *Logger -} - -// NewService creates a log service factory for Core. 
-func NewService(opts Options) func(*framework.Core) (any, error) { - return func(c *framework.Core) (any, error) { - logger := New(opts) - - return &Service{ - ServiceRuntime: framework.NewServiceRuntime(c, opts), - Logger: logger, - }, nil - } -} - -// OnStartup registers query and task handlers. -func (s *Service) OnStartup(ctx context.Context) error { - s.Core().RegisterQuery(s.handleQuery) - s.Core().RegisterTask(s.handleTask) - return nil -} - -// QueryLevel returns the current log level. -type QueryLevel struct{} - -// TaskSetLevel changes the log level. -type TaskSetLevel struct { - Level Level -} - -func (s *Service) handleQuery(c *framework.Core, q framework.Query) (any, bool, error) { - switch q.(type) { - case QueryLevel: - return s.Level(), true, nil - } - return nil, false, nil -} - -func (s *Service) handleTask(c *framework.Core, t framework.Task) (any, bool, error) { - switch m := t.(type) { - case TaskSetLevel: - s.SetLevel(m.Level) - return nil, true, nil - } - return nil, false, nil -} diff --git a/pkg/mcp/mcp.go b/pkg/mcp/mcp.go deleted file mode 100644 index 2b2345b..0000000 --- a/pkg/mcp/mcp.go +++ /dev/null @@ -1,409 +0,0 @@ -// Package mcp provides a lightweight MCP (Model Context Protocol) server for CLI use. -// For full GUI integration (display, webview, process management), see core-gui/pkg/mcp. -package mcp - -import ( - "context" - "fmt" - "os" - "path/filepath" - "strings" - - "github.com/modelcontextprotocol/go-sdk/mcp" -) - -// Service provides a lightweight MCP server with file operations only. -// For full GUI features, use the core-gui package. -type Service struct { - server *mcp.Server -} - -// New creates a new MCP service with file operations. -func New() *Service { - impl := &mcp.Implementation{ - Name: "core-cli", - Version: "0.1.0", - } - - server := mcp.NewServer(impl, nil) - s := &Service{server: server} - s.registerTools() - return s -} - -// registerTools adds file operation tools to the MCP server. 
-func (s *Service) registerTools() { - // File operations - mcp.AddTool(s.server, &mcp.Tool{ - Name: "file_read", - Description: "Read the contents of a file", - }, s.readFile) - - mcp.AddTool(s.server, &mcp.Tool{ - Name: "file_write", - Description: "Write content to a file", - }, s.writeFile) - - mcp.AddTool(s.server, &mcp.Tool{ - Name: "file_delete", - Description: "Delete a file or empty directory", - }, s.deleteFile) - - mcp.AddTool(s.server, &mcp.Tool{ - Name: "file_rename", - Description: "Rename or move a file", - }, s.renameFile) - - mcp.AddTool(s.server, &mcp.Tool{ - Name: "file_exists", - Description: "Check if a file or directory exists", - }, s.fileExists) - - mcp.AddTool(s.server, &mcp.Tool{ - Name: "file_edit", - Description: "Edit a file by replacing old_string with new_string. Use replace_all=true to replace all occurrences.", - }, s.editDiff) - - // Directory operations - mcp.AddTool(s.server, &mcp.Tool{ - Name: "dir_list", - Description: "List contents of a directory", - }, s.listDirectory) - - mcp.AddTool(s.server, &mcp.Tool{ - Name: "dir_create", - Description: "Create a new directory", - }, s.createDirectory) - - // Language detection - mcp.AddTool(s.server, &mcp.Tool{ - Name: "lang_detect", - Description: "Detect the programming language of a file", - }, s.detectLanguage) - - mcp.AddTool(s.server, &mcp.Tool{ - Name: "lang_list", - Description: "Get list of supported programming languages", - }, s.getSupportedLanguages) -} - -// Tool input/output types - -type ReadFileInput struct { - Path string `json:"path"` -} - -type ReadFileOutput struct { - Content string `json:"content"` - Language string `json:"language"` - Path string `json:"path"` -} - -type WriteFileInput struct { - Path string `json:"path"` - Content string `json:"content"` -} - -type WriteFileOutput struct { - Success bool `json:"success"` - Path string `json:"path"` -} - -type ListDirectoryInput struct { - Path string `json:"path"` -} - -type ListDirectoryOutput struct { - 
Entries []DirectoryEntry `json:"entries"` - Path string `json:"path"` -} - -type DirectoryEntry struct { - Name string `json:"name"` - Path string `json:"path"` - IsDir bool `json:"isDir"` - Size int64 `json:"size"` -} - -type CreateDirectoryInput struct { - Path string `json:"path"` -} - -type CreateDirectoryOutput struct { - Success bool `json:"success"` - Path string `json:"path"` -} - -type DeleteFileInput struct { - Path string `json:"path"` -} - -type DeleteFileOutput struct { - Success bool `json:"success"` - Path string `json:"path"` -} - -type RenameFileInput struct { - OldPath string `json:"oldPath"` - NewPath string `json:"newPath"` -} - -type RenameFileOutput struct { - Success bool `json:"success"` - OldPath string `json:"oldPath"` - NewPath string `json:"newPath"` -} - -type FileExistsInput struct { - Path string `json:"path"` -} - -type FileExistsOutput struct { - Exists bool `json:"exists"` - IsDir bool `json:"isDir"` - Path string `json:"path"` -} - -type DetectLanguageInput struct { - Path string `json:"path"` -} - -type DetectLanguageOutput struct { - Language string `json:"language"` - Path string `json:"path"` -} - -type GetSupportedLanguagesInput struct{} - -type GetSupportedLanguagesOutput struct { - Languages []LanguageInfo `json:"languages"` -} - -type LanguageInfo struct { - ID string `json:"id"` - Name string `json:"name"` - Extensions []string `json:"extensions"` -} - -type EditDiffInput struct { - Path string `json:"path"` - OldString string `json:"old_string"` - NewString string `json:"new_string"` - ReplaceAll bool `json:"replace_all,omitempty"` -} - -type EditDiffOutput struct { - Path string `json:"path"` - Success bool `json:"success"` - Replacements int `json:"replacements"` -} - -// Tool handlers - -func (s *Service) readFile(ctx context.Context, req *mcp.CallToolRequest, input ReadFileInput) (*mcp.CallToolResult, ReadFileOutput, error) { - content, err := os.ReadFile(input.Path) - if err != nil { - return nil, ReadFileOutput{}, 
fmt.Errorf("failed to read file: %w", err) - } - return nil, ReadFileOutput{ - Content: string(content), - Language: detectLanguageFromPath(input.Path), - Path: input.Path, - }, nil -} - -func (s *Service) writeFile(ctx context.Context, req *mcp.CallToolRequest, input WriteFileInput) (*mcp.CallToolResult, WriteFileOutput, error) { - dir := filepath.Dir(input.Path) - if err := os.MkdirAll(dir, 0755); err != nil { - return nil, WriteFileOutput{}, fmt.Errorf("failed to create directory: %w", err) - } - err := os.WriteFile(input.Path, []byte(input.Content), 0644) - if err != nil { - return nil, WriteFileOutput{}, fmt.Errorf("failed to write file: %w", err) - } - return nil, WriteFileOutput{Success: true, Path: input.Path}, nil -} - -func (s *Service) listDirectory(ctx context.Context, req *mcp.CallToolRequest, input ListDirectoryInput) (*mcp.CallToolResult, ListDirectoryOutput, error) { - entries, err := os.ReadDir(input.Path) - if err != nil { - return nil, ListDirectoryOutput{}, fmt.Errorf("failed to list directory: %w", err) - } - result := make([]DirectoryEntry, 0, len(entries)) - for _, e := range entries { - info, _ := e.Info() - var size int64 - if info != nil { - size = info.Size() - } - result = append(result, DirectoryEntry{ - Name: e.Name(), - Path: filepath.Join(input.Path, e.Name()), - IsDir: e.IsDir(), - Size: size, - }) - } - return nil, ListDirectoryOutput{Entries: result, Path: input.Path}, nil -} - -func (s *Service) createDirectory(ctx context.Context, req *mcp.CallToolRequest, input CreateDirectoryInput) (*mcp.CallToolResult, CreateDirectoryOutput, error) { - err := os.MkdirAll(input.Path, 0755) - if err != nil { - return nil, CreateDirectoryOutput{}, fmt.Errorf("failed to create directory: %w", err) - } - return nil, CreateDirectoryOutput{Success: true, Path: input.Path}, nil -} - -func (s *Service) deleteFile(ctx context.Context, req *mcp.CallToolRequest, input DeleteFileInput) (*mcp.CallToolResult, DeleteFileOutput, error) { - err := 
os.Remove(input.Path) - if err != nil { - return nil, DeleteFileOutput{}, fmt.Errorf("failed to delete file: %w", err) - } - return nil, DeleteFileOutput{Success: true, Path: input.Path}, nil -} - -func (s *Service) renameFile(ctx context.Context, req *mcp.CallToolRequest, input RenameFileInput) (*mcp.CallToolResult, RenameFileOutput, error) { - err := os.Rename(input.OldPath, input.NewPath) - if err != nil { - return nil, RenameFileOutput{}, fmt.Errorf("failed to rename file: %w", err) - } - return nil, RenameFileOutput{Success: true, OldPath: input.OldPath, NewPath: input.NewPath}, nil -} - -func (s *Service) fileExists(ctx context.Context, req *mcp.CallToolRequest, input FileExistsInput) (*mcp.CallToolResult, FileExistsOutput, error) { - info, err := os.Stat(input.Path) - if os.IsNotExist(err) { - return nil, FileExistsOutput{Exists: false, IsDir: false, Path: input.Path}, nil - } - if err != nil { - return nil, FileExistsOutput{}, fmt.Errorf("failed to check file: %w", err) - } - return nil, FileExistsOutput{Exists: true, IsDir: info.IsDir(), Path: input.Path}, nil -} - -func (s *Service) detectLanguage(ctx context.Context, req *mcp.CallToolRequest, input DetectLanguageInput) (*mcp.CallToolResult, DetectLanguageOutput, error) { - lang := detectLanguageFromPath(input.Path) - return nil, DetectLanguageOutput{Language: lang, Path: input.Path}, nil -} - -func (s *Service) getSupportedLanguages(ctx context.Context, req *mcp.CallToolRequest, input GetSupportedLanguagesInput) (*mcp.CallToolResult, GetSupportedLanguagesOutput, error) { - languages := []LanguageInfo{ - {ID: "typescript", Name: "TypeScript", Extensions: []string{".ts", ".tsx"}}, - {ID: "javascript", Name: "JavaScript", Extensions: []string{".js", ".jsx"}}, - {ID: "go", Name: "Go", Extensions: []string{".go"}}, - {ID: "python", Name: "Python", Extensions: []string{".py"}}, - {ID: "rust", Name: "Rust", Extensions: []string{".rs"}}, - {ID: "java", Name: "Java", Extensions: []string{".java"}}, - {ID: "php", 
Name: "PHP", Extensions: []string{".php"}}, - {ID: "ruby", Name: "Ruby", Extensions: []string{".rb"}}, - {ID: "html", Name: "HTML", Extensions: []string{".html", ".htm"}}, - {ID: "css", Name: "CSS", Extensions: []string{".css"}}, - {ID: "json", Name: "JSON", Extensions: []string{".json"}}, - {ID: "yaml", Name: "YAML", Extensions: []string{".yaml", ".yml"}}, - {ID: "markdown", Name: "Markdown", Extensions: []string{".md", ".markdown"}}, - {ID: "sql", Name: "SQL", Extensions: []string{".sql"}}, - {ID: "shell", Name: "Shell", Extensions: []string{".sh", ".bash"}}, - } - return nil, GetSupportedLanguagesOutput{Languages: languages}, nil -} - -func (s *Service) editDiff(ctx context.Context, req *mcp.CallToolRequest, input EditDiffInput) (*mcp.CallToolResult, EditDiffOutput, error) { - content, err := os.ReadFile(input.Path) - if err != nil { - return nil, EditDiffOutput{}, fmt.Errorf("failed to read file: %w", err) - } - - fileContent := string(content) - count := 0 - - if input.ReplaceAll { - count = strings.Count(fileContent, input.OldString) - if count == 0 { - return nil, EditDiffOutput{}, fmt.Errorf("old_string not found in file") - } - fileContent = strings.ReplaceAll(fileContent, input.OldString, input.NewString) - } else { - if !strings.Contains(fileContent, input.OldString) { - return nil, EditDiffOutput{}, fmt.Errorf("old_string not found in file") - } - fileContent = strings.Replace(fileContent, input.OldString, input.NewString, 1) - count = 1 - } - - err = os.WriteFile(input.Path, []byte(fileContent), 0644) - if err != nil { - return nil, EditDiffOutput{}, fmt.Errorf("failed to write file: %w", err) - } - - return nil, EditDiffOutput{ - Path: input.Path, - Success: true, - Replacements: count, - }, nil -} - -// detectLanguageFromPath maps file extensions to language IDs. 
-func detectLanguageFromPath(path string) string { - ext := filepath.Ext(path) - switch ext { - case ".ts", ".tsx": - return "typescript" - case ".js", ".jsx": - return "javascript" - case ".go": - return "go" - case ".py": - return "python" - case ".rs": - return "rust" - case ".rb": - return "ruby" - case ".java": - return "java" - case ".php": - return "php" - case ".c", ".h": - return "c" - case ".cpp", ".hpp", ".cc", ".cxx": - return "cpp" - case ".cs": - return "csharp" - case ".html", ".htm": - return "html" - case ".css": - return "css" - case ".scss": - return "scss" - case ".json": - return "json" - case ".yaml", ".yml": - return "yaml" - case ".xml": - return "xml" - case ".md", ".markdown": - return "markdown" - case ".sql": - return "sql" - case ".sh", ".bash": - return "shell" - case ".swift": - return "swift" - case ".kt", ".kts": - return "kotlin" - default: - if filepath.Base(path) == "Dockerfile" { - return "dockerfile" - } - return "plaintext" - } -} - -// Run starts the MCP server on stdio. -func (s *Service) Run(ctx context.Context) error { - return s.server.Run(ctx, &mcp.StdioTransport{}) -} - -// Server returns the underlying MCP server for advanced configuration. -func (s *Service) Server() *mcp.Server { - return s.server -} diff --git a/pkg/monitor/cmd_commands.go b/pkg/monitor/cmd_commands.go deleted file mode 100644 index e760226..0000000 --- a/pkg/monitor/cmd_commands.go +++ /dev/null @@ -1,47 +0,0 @@ -// Package monitor provides security monitoring commands. 
-// -// Commands: -// - monitor: Aggregate security findings from GitHub Security Tab, workflow artifacts, and PR comments -// -// Data sources (all free tier): -// - Code scanning: Semgrep, Trivy, Gitleaks, OSV-Scanner, Checkov, CodeQL -// - Dependabot: Dependency vulnerability alerts -// - Secret scanning: Exposed secrets/credentials -package monitor - -import ( - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -func init() { - cli.RegisterCommands(AddMonitorCommands) -} - -// Style aliases from shared package -var ( - successStyle = cli.SuccessStyle - errorStyle = cli.ErrorStyle - warningStyle = cli.WarningStyle - dimStyle = cli.DimStyle -) - -// AddMonitorCommands registers the 'monitor' command. -func AddMonitorCommands(root *cli.Command) { - monitorCmd := &cli.Command{ - Use: "monitor", - Short: i18n.T("cmd.monitor.short"), - Long: i18n.T("cmd.monitor.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runMonitor() - }, - } - - // Flags - monitorCmd.Flags().StringVarP(&monitorRepo, "repo", "r", "", i18n.T("cmd.monitor.flag.repo")) - monitorCmd.Flags().StringSliceVarP(&monitorSeverity, "severity", "s", []string{}, i18n.T("cmd.monitor.flag.severity")) - monitorCmd.Flags().BoolVar(&monitorJSON, "json", false, i18n.T("cmd.monitor.flag.json")) - monitorCmd.Flags().BoolVar(&monitorAll, "all", false, i18n.T("cmd.monitor.flag.all")) - - root.AddCommand(monitorCmd) -} diff --git a/pkg/monitor/cmd_monitor.go b/pkg/monitor/cmd_monitor.go deleted file mode 100644 index d4821e2..0000000 --- a/pkg/monitor/cmd_monitor.go +++ /dev/null @@ -1,589 +0,0 @@ -// cmd_monitor.go implements the 'monitor' command for aggregating security findings. 
-// -// Usage: -// core monitor # Monitor current repo -// core monitor --repo X # Monitor specific repo -// core monitor --all # Monitor all repos in registry -// core monitor --severity high # Filter by severity -// core monitor --json # Output as JSON - -package monitor - -import ( - "encoding/json" - "fmt" - "os/exec" - "sort" - "strings" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/errors" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/repos" -) - -// Command flags -var ( - monitorRepo string - monitorSeverity []string - monitorJSON bool - monitorAll bool -) - -// Finding represents a security finding from any source -type Finding struct { - Source string `json:"source"` // semgrep, trivy, dependabot, secret-scanning, etc. - Severity string `json:"severity"` // critical, high, medium, low - Rule string `json:"rule"` // Rule ID or CVE - File string `json:"file"` // Affected file path - Line int `json:"line"` // Line number (0 if N/A) - Message string `json:"message"` // Description - URL string `json:"url"` // Link to finding - State string `json:"state"` // open, dismissed, fixed - RepoName string `json:"repo"` // Repository name - CreatedAt string `json:"created_at"` // When found - Labels []string `json:"suggested_labels,omitempty"` -} - -// CodeScanningAlert represents a GitHub code scanning alert -type CodeScanningAlert struct { - Number int `json:"number"` - State string `json:"state"` // open, dismissed, fixed - Rule struct { - ID string `json:"id"` - Severity string `json:"severity"` - Description string `json:"description"` - } `json:"rule"` - Tool struct { - Name string `json:"name"` - } `json:"tool"` - MostRecentInstance struct { - Location struct { - Path string `json:"path"` - StartLine int `json:"start_line"` - } `json:"location"` - Message struct { - Text string `json:"text"` - } `json:"message"` - } `json:"most_recent_instance"` - HTMLURL string `json:"html_url"` - CreatedAt string `json:"created_at"` 
-} - -// DependabotAlert represents a GitHub Dependabot alert -type DependabotAlert struct { - Number int `json:"number"` - State string `json:"state"` // open, dismissed, fixed - SecurityVulnerability struct { - Severity string `json:"severity"` - Package struct { - Name string `json:"name"` - Ecosystem string `json:"ecosystem"` - } `json:"package"` - } `json:"security_vulnerability"` - SecurityAdvisory struct { - CVEID string `json:"cve_id"` - Summary string `json:"summary"` - Description string `json:"description"` - } `json:"security_advisory"` - Dependency struct { - ManifestPath string `json:"manifest_path"` - } `json:"dependency"` - HTMLURL string `json:"html_url"` - CreatedAt string `json:"created_at"` -} - -// SecretScanningAlert represents a GitHub secret scanning alert -type SecretScanningAlert struct { - Number int `json:"number"` - State string `json:"state"` // open, resolved - SecretType string `json:"secret_type"` - Secret string `json:"secret"` // Partial, redacted - HTMLURL string `json:"html_url"` - LocationType string `json:"location_type"` - CreatedAt string `json:"created_at"` -} - -func runMonitor() error { - // Check gh is available - if _, err := exec.LookPath("gh"); err != nil { - return errors.E("monitor", i18n.T("error.gh_not_found"), err) - } - - // Determine repos to scan - repoList, err := resolveRepos() - if err != nil { - return err - } - - if len(repoList) == 0 { - return errors.E("monitor", i18n.T("cmd.monitor.error.no_repos"), nil) - } - - // Collect all findings and errors - var allFindings []Finding - var fetchErrors []string - for _, repo := range repoList { - if !monitorJSON { - cli.Print("\033[2K\r%s %s...", dimStyle.Render(i18n.T("cmd.monitor.scanning")), repo) - } - - findings, errs := fetchRepoFindings(repo) - allFindings = append(allFindings, findings...) - fetchErrors = append(fetchErrors, errs...) 
- } - - // Filter by severity if specified - if len(monitorSeverity) > 0 { - allFindings = filterBySeverity(allFindings, monitorSeverity) - } - - // Sort by severity (critical first) - sortBySeverity(allFindings) - - // Output - if monitorJSON { - return outputJSON(allFindings) - } - - cli.Print("\033[2K\r") // Clear scanning line - - // Show any fetch errors as warnings - if len(fetchErrors) > 0 { - for _, e := range fetchErrors { - cli.Print("%s %s\n", warningStyle.Render("!"), e) - } - cli.Blank() - } - - return outputTable(allFindings) -} - -// resolveRepos determines which repos to scan -func resolveRepos() ([]string, error) { - if monitorRepo != "" { - // Specific repo - if fully qualified (org/repo), use as-is - if strings.Contains(monitorRepo, "/") { - return []string{monitorRepo}, nil - } - // Otherwise, try to detect org from git remote, fallback to host-uk - // Note: Users outside host-uk org should use fully qualified names - org := detectOrgFromGit() - if org == "" { - org = "host-uk" - } - return []string{org + "/" + monitorRepo}, nil - } - - if monitorAll { - // All repos from registry - registry, err := repos.FindRegistry() - if err != nil { - return nil, errors.E("monitor", "failed to find registry", err) - } - - loaded, err := repos.LoadRegistry(registry) - if err != nil { - return nil, errors.E("monitor", "failed to load registry", err) - } - - var repoList []string - for _, r := range loaded.Repos { - repoList = append(repoList, loaded.Org+"/"+r.Name) - } - return repoList, nil - } - - // Default to current repo - repo, err := detectRepoFromGit() - if err != nil { - return nil, err - } - return []string{repo}, nil -} - -// fetchRepoFindings fetches all security findings for a repo -// Returns findings and any errors encountered (errors don't stop other fetches) -func fetchRepoFindings(repoFullName string) ([]Finding, []string) { - var findings []Finding - var errs []string - repoName := strings.Split(repoFullName, "/")[1] - - // Fetch code 
scanning alerts - codeFindings, err := fetchCodeScanningAlerts(repoFullName) - if err != nil { - errs = append(errs, fmt.Sprintf("%s: code-scanning: %s", repoName, err)) - } - findings = append(findings, codeFindings...) - - // Fetch Dependabot alerts - depFindings, err := fetchDependabotAlerts(repoFullName) - if err != nil { - errs = append(errs, fmt.Sprintf("%s: dependabot: %s", repoName, err)) - } - findings = append(findings, depFindings...) - - // Fetch secret scanning alerts - secretFindings, err := fetchSecretScanningAlerts(repoFullName) - if err != nil { - errs = append(errs, fmt.Sprintf("%s: secret-scanning: %s", repoName, err)) - } - findings = append(findings, secretFindings...) - - return findings, errs -} - -// fetchCodeScanningAlerts fetches code scanning alerts -func fetchCodeScanningAlerts(repoFullName string) ([]Finding, error) { - args := []string{ - "api", - fmt.Sprintf("repos/%s/code-scanning/alerts", repoFullName), - } - - cmd := exec.Command("gh", args...) - output, err := cmd.Output() - if err != nil { - // Check for expected "not enabled" responses vs actual errors - if exitErr, ok := err.(*exec.ExitError); ok { - stderr := string(exitErr.Stderr) - // These are expected conditions, not errors - if strings.Contains(stderr, "Advanced Security must be enabled") || - strings.Contains(stderr, "no analysis found") || - strings.Contains(stderr, "Not Found") { - return nil, nil - } - } - return nil, errors.E("monitor.fetchCodeScanning", "API request failed", err) - } - - var alerts []CodeScanningAlert - if err := json.Unmarshal(output, &alerts); err != nil { - return nil, errors.E("monitor.fetchCodeScanning", "failed to parse response", err) - } - - repoName := strings.Split(repoFullName, "/")[1] - var findings []Finding - for _, alert := range alerts { - if alert.State != "open" { - continue - } - f := Finding{ - Source: alert.Tool.Name, - Severity: normalizeSeverity(alert.Rule.Severity), - Rule: alert.Rule.ID, - File: 
alert.MostRecentInstance.Location.Path, - Line: alert.MostRecentInstance.Location.StartLine, - Message: alert.MostRecentInstance.Message.Text, - URL: alert.HTMLURL, - State: alert.State, - RepoName: repoName, - CreatedAt: alert.CreatedAt, - Labels: []string{"type:security"}, - } - if f.Message == "" { - f.Message = alert.Rule.Description - } - findings = append(findings, f) - } - - return findings, nil -} - -// fetchDependabotAlerts fetches Dependabot alerts -func fetchDependabotAlerts(repoFullName string) ([]Finding, error) { - args := []string{ - "api", - fmt.Sprintf("repos/%s/dependabot/alerts", repoFullName), - } - - cmd := exec.Command("gh", args...) - output, err := cmd.Output() - if err != nil { - if exitErr, ok := err.(*exec.ExitError); ok { - stderr := string(exitErr.Stderr) - // Dependabot not enabled is expected - if strings.Contains(stderr, "Dependabot alerts are not enabled") || - strings.Contains(stderr, "Not Found") { - return nil, nil - } - } - return nil, errors.E("monitor.fetchDependabot", "API request failed", err) - } - - var alerts []DependabotAlert - if err := json.Unmarshal(output, &alerts); err != nil { - return nil, errors.E("monitor.fetchDependabot", "failed to parse response", err) - } - - repoName := strings.Split(repoFullName, "/")[1] - var findings []Finding - for _, alert := range alerts { - if alert.State != "open" { - continue - } - f := Finding{ - Source: "dependabot", - Severity: normalizeSeverity(alert.SecurityVulnerability.Severity), - Rule: alert.SecurityAdvisory.CVEID, - File: alert.Dependency.ManifestPath, - Line: 0, - Message: fmt.Sprintf("%s: %s", alert.SecurityVulnerability.Package.Name, alert.SecurityAdvisory.Summary), - URL: alert.HTMLURL, - State: alert.State, - RepoName: repoName, - CreatedAt: alert.CreatedAt, - Labels: []string{"type:security", "dependencies"}, - } - findings = append(findings, f) - } - - return findings, nil -} - -// fetchSecretScanningAlerts fetches secret scanning alerts -func 
fetchSecretScanningAlerts(repoFullName string) ([]Finding, error) { - args := []string{ - "api", - fmt.Sprintf("repos/%s/secret-scanning/alerts", repoFullName), - } - - cmd := exec.Command("gh", args...) - output, err := cmd.Output() - if err != nil { - if exitErr, ok := err.(*exec.ExitError); ok { - stderr := string(exitErr.Stderr) - // Secret scanning not enabled is expected - if strings.Contains(stderr, "Secret scanning is disabled") || - strings.Contains(stderr, "Not Found") { - return nil, nil - } - } - return nil, errors.E("monitor.fetchSecretScanning", "API request failed", err) - } - - var alerts []SecretScanningAlert - if err := json.Unmarshal(output, &alerts); err != nil { - return nil, errors.E("monitor.fetchSecretScanning", "failed to parse response", err) - } - - repoName := strings.Split(repoFullName, "/")[1] - var findings []Finding - for _, alert := range alerts { - if alert.State != "open" { - continue - } - f := Finding{ - Source: "secret-scanning", - Severity: "critical", // Secrets are always critical - Rule: alert.SecretType, - File: alert.LocationType, - Line: 0, - Message: fmt.Sprintf("Exposed %s detected", alert.SecretType), - URL: alert.HTMLURL, - State: alert.State, - RepoName: repoName, - CreatedAt: alert.CreatedAt, - Labels: []string{"type:security", "secrets"}, - } - findings = append(findings, f) - } - - return findings, nil -} - -// normalizeSeverity normalizes severity strings to standard values -func normalizeSeverity(s string) string { - s = strings.ToLower(s) - switch s { - case "critical", "crit": - return "critical" - case "high", "error": - return "high" - case "medium", "moderate", "warning": - return "medium" - case "low", "info", "note": - return "low" - default: - return "medium" - } -} - -// filterBySeverity filters findings by severity -func filterBySeverity(findings []Finding, severities []string) []Finding { - sevSet := make(map[string]bool) - for _, s := range severities { - sevSet[strings.ToLower(s)] = true - } - - 
var filtered []Finding - for _, f := range findings { - if sevSet[f.Severity] { - filtered = append(filtered, f) - } - } - return filtered -} - -// sortBySeverity sorts findings by severity (critical first) -func sortBySeverity(findings []Finding) { - severityOrder := map[string]int{ - "critical": 0, - "high": 1, - "medium": 2, - "low": 3, - } - - sort.Slice(findings, func(i, j int) bool { - oi := severityOrder[findings[i].Severity] - oj := severityOrder[findings[j].Severity] - if oi != oj { - return oi < oj - } - return findings[i].RepoName < findings[j].RepoName - }) -} - -// outputJSON outputs findings as JSON -func outputJSON(findings []Finding) error { - data, err := json.MarshalIndent(findings, "", " ") - if err != nil { - return errors.E("monitor", "failed to marshal findings", err) - } - cli.Print("%s\n", string(data)) - return nil -} - -// outputTable outputs findings as a formatted table -func outputTable(findings []Finding) error { - if len(findings) == 0 { - cli.Print("%s\n", successStyle.Render(i18n.T("cmd.monitor.no_findings"))) - return nil - } - - // Count by severity - counts := make(map[string]int) - for _, f := range findings { - counts[f.Severity]++ - } - - // Header summary - var parts []string - if counts["critical"] > 0 { - parts = append(parts, errorStyle.Render(fmt.Sprintf("%d critical", counts["critical"]))) - } - if counts["high"] > 0 { - parts = append(parts, errorStyle.Render(fmt.Sprintf("%d high", counts["high"]))) - } - if counts["medium"] > 0 { - parts = append(parts, warningStyle.Render(fmt.Sprintf("%d medium", counts["medium"]))) - } - if counts["low"] > 0 { - parts = append(parts, dimStyle.Render(fmt.Sprintf("%d low", counts["low"]))) - } - cli.Print("%s: %s\n", i18n.T("cmd.monitor.found"), strings.Join(parts, ", ")) - cli.Blank() - - // Group by repo - byRepo := make(map[string][]Finding) - for _, f := range findings { - byRepo[f.RepoName] = append(byRepo[f.RepoName], f) - } - - // Sort repos for consistent output - repoNames := 
make([]string, 0, len(byRepo)) - for repo := range byRepo { - repoNames = append(repoNames, repo) - } - sort.Strings(repoNames) - - // Print by repo - for _, repo := range repoNames { - repoFindings := byRepo[repo] - cli.Print("%s\n", cli.BoldStyle.Render(repo)) - for _, f := range repoFindings { - sevStyle := dimStyle - switch f.Severity { - case "critical", "high": - sevStyle = errorStyle - case "medium": - sevStyle = warningStyle - } - - // Format: [severity] source: message (file:line) - location := "" - if f.File != "" { - location = f.File - if f.Line > 0 { - location = fmt.Sprintf("%s:%d", f.File, f.Line) - } - } - - cli.Print(" %s %s: %s", - sevStyle.Render(fmt.Sprintf("[%s]", f.Severity)), - dimStyle.Render(f.Source), - truncate(f.Message, 60)) - if location != "" { - cli.Print(" %s", dimStyle.Render("("+location+")")) - } - cli.Blank() - } - cli.Blank() - } - - return nil -} - -// truncate truncates a string to max runes (Unicode-safe) -func truncate(s string, max int) string { - runes := []rune(s) - if len(runes) <= max { - return s - } - return string(runes[:max-3]) + "..." 
-} - -// detectRepoFromGit detects the repo from git remote -func detectRepoFromGit() (string, error) { - cmd := exec.Command("git", "remote", "get-url", "origin") - output, err := cmd.Output() - if err != nil { - return "", errors.E("monitor", i18n.T("cmd.monitor.error.not_git_repo"), err) - } - - url := strings.TrimSpace(string(output)) - return parseGitHubRepo(url) -} - -// detectOrgFromGit tries to detect the org from git remote -func detectOrgFromGit() string { - repo, err := detectRepoFromGit() - if err != nil { - return "" - } - parts := strings.Split(repo, "/") - if len(parts) >= 1 { - return parts[0] - } - return "" -} - -// parseGitHubRepo extracts org/repo from a git URL -func parseGitHubRepo(url string) (string, error) { - // Handle SSH URLs: git@github.com:org/repo.git - if strings.HasPrefix(url, "git@github.com:") { - path := strings.TrimPrefix(url, "git@github.com:") - path = strings.TrimSuffix(path, ".git") - return path, nil - } - - // Handle HTTPS URLs: https://github.com/org/repo.git - if strings.Contains(url, "github.com/") { - parts := strings.Split(url, "github.com/") - if len(parts) >= 2 { - path := strings.TrimSuffix(parts[1], ".git") - return path, nil - } - } - - return "", fmt.Errorf("could not parse GitHub repo from URL: %s", url) -} diff --git a/pkg/php/cmd.go b/pkg/php/cmd.go deleted file mode 100644 index 0f72bd9..0000000 --- a/pkg/php/cmd.go +++ /dev/null @@ -1,142 +0,0 @@ -package php - -import ( - "os" - "path/filepath" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/workspace" - "github.com/spf13/cobra" -) - -func init() { - cli.RegisterCommands(AddPHPCommands) -} - -// Style aliases from shared -var ( - successStyle = cli.SuccessStyle - errorStyle = cli.ErrorStyle - dimStyle = cli.DimStyle - linkStyle = cli.LinkStyle -) - -// Service colors for log output (domain-specific, keep local) -var ( - phpFrankenPHPStyle = cli.NewStyle().Foreground(cli.ColourIndigo500) - 
phpViteStyle = cli.NewStyle().Foreground(cli.ColourYellow500) - phpHorizonStyle = cli.NewStyle().Foreground(cli.ColourOrange500) - phpReverbStyle = cli.NewStyle().Foreground(cli.ColourViolet500) - phpRedisStyle = cli.NewStyle().Foreground(cli.ColourRed500) -) - -// Status styles (from shared) -var ( - phpStatusRunning = cli.SuccessStyle - phpStatusStopped = cli.DimStyle - phpStatusError = cli.ErrorStyle -) - -// QA command styles (from shared) -var ( - phpQAPassedStyle = cli.SuccessStyle - phpQAFailedStyle = cli.ErrorStyle - phpQAWarningStyle = cli.WarningStyle - phpQAStageStyle = cli.HeaderStyle -) - -// Security severity styles (from shared) -var ( - phpSecurityCriticalStyle = cli.NewStyle().Bold().Foreground(cli.ColourRed500) - phpSecurityHighStyle = cli.NewStyle().Bold().Foreground(cli.ColourOrange500) - phpSecurityMediumStyle = cli.NewStyle().Foreground(cli.ColourAmber500) - phpSecurityLowStyle = cli.NewStyle().Foreground(cli.ColourGray500) -) - -// AddPHPCommands adds PHP/Laravel development commands. 
-func AddPHPCommands(root *cobra.Command) { - phpCmd := &cobra.Command{ - Use: "php", - Short: i18n.T("cmd.php.short"), - Long: i18n.T("cmd.php.long"), - PersistentPreRunE: func(cmd *cobra.Command, args []string) error { - // Check if we are in a workspace root - wsRoot, err := workspace.FindWorkspaceRoot() - if err != nil { - return nil // Not in a workspace, regular behavior - } - - // Load workspace config - config, err := workspace.LoadConfig(wsRoot) - if err != nil || config == nil { - return nil // Failed to load or no config, ignore - } - - if config.Active == "" { - return nil // No active package - } - - // Calculate package path - pkgDir := config.PackagesDir - if pkgDir == "" { - pkgDir = "./packages" - } - if !filepath.IsAbs(pkgDir) { - pkgDir = filepath.Join(wsRoot, pkgDir) - } - - targetDir := filepath.Join(pkgDir, config.Active) - - // Check if target directory exists - if _, err := os.Stat(targetDir); err != nil { - cli.Warnf("Active package directory not found: %s", targetDir) - return nil - } - - // Change working directory - if err := os.Chdir(targetDir); err != nil { - return cli.Err("failed to change directory to active package: %w", err) - } - - cli.Print("%s %s\n", dimStyle.Render("Workspace:"), config.Active) - return nil - }, - } - root.AddCommand(phpCmd) - - - // Development - addPHPDevCommand(phpCmd) - addPHPLogsCommand(phpCmd) - addPHPStopCommand(phpCmd) - addPHPStatusCommand(phpCmd) - addPHPSSLCommand(phpCmd) - - // Build & Deploy - addPHPBuildCommand(phpCmd) - addPHPServeCommand(phpCmd) - addPHPShellCommand(phpCmd) - - // Quality (existing) - addPHPTestCommand(phpCmd) - addPHPFmtCommand(phpCmd) - addPHPStanCommand(phpCmd) - - // Quality (new) - addPHPPsalmCommand(phpCmd) - addPHPAuditCommand(phpCmd) - addPHPSecurityCommand(phpCmd) - addPHPQACommand(phpCmd) - addPHPRectorCommand(phpCmd) - addPHPInfectionCommand(phpCmd) - - // CI/CD Integration - addPHPCICommand(phpCmd) - - // Package Management - addPHPPackagesCommands(phpCmd) - - // 
Deployment - addPHPDeployCommands(phpCmd) -} \ No newline at end of file diff --git a/pkg/php/cmd_build.go b/pkg/php/cmd_build.go deleted file mode 100644 index 0820572..0000000 --- a/pkg/php/cmd_build.go +++ /dev/null @@ -1,291 +0,0 @@ -package php - -import ( - "context" - "errors" - "os" - "strings" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/spf13/cobra" -) - -var ( - buildType string - buildImageName string - buildTag string - buildPlatform string - buildDockerfile string - buildOutputPath string - buildFormat string - buildTemplate string - buildNoCache bool -) - -func addPHPBuildCommand(parent *cobra.Command) { - buildCmd := &cobra.Command{ - Use: "build", - Short: i18n.T("cmd.php.build.short"), - Long: i18n.T("cmd.php.build.long"), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - ctx := context.Background() - - switch strings.ToLower(buildType) { - case "linuxkit": - return runPHPBuildLinuxKit(ctx, cwd, linuxKitBuildOptions{ - OutputPath: buildOutputPath, - Format: buildFormat, - Template: buildTemplate, - }) - default: - return runPHPBuildDocker(ctx, cwd, dockerBuildOptions{ - ImageName: buildImageName, - Tag: buildTag, - Platform: buildPlatform, - Dockerfile: buildDockerfile, - NoCache: buildNoCache, - }) - } - }, - } - - buildCmd.Flags().StringVar(&buildType, "type", "", i18n.T("cmd.php.build.flag.type")) - buildCmd.Flags().StringVar(&buildImageName, "name", "", i18n.T("cmd.php.build.flag.name")) - buildCmd.Flags().StringVar(&buildTag, "tag", "", i18n.T("common.flag.tag")) - buildCmd.Flags().StringVar(&buildPlatform, "platform", "", i18n.T("cmd.php.build.flag.platform")) - buildCmd.Flags().StringVar(&buildDockerfile, "dockerfile", "", i18n.T("cmd.php.build.flag.dockerfile")) - buildCmd.Flags().StringVar(&buildOutputPath, "output", "", i18n.T("cmd.php.build.flag.output")) - 
buildCmd.Flags().StringVar(&buildFormat, "format", "", i18n.T("cmd.php.build.flag.format")) - buildCmd.Flags().StringVar(&buildTemplate, "template", "", i18n.T("cmd.php.build.flag.template")) - buildCmd.Flags().BoolVar(&buildNoCache, "no-cache", false, i18n.T("cmd.php.build.flag.no_cache")) - - parent.AddCommand(buildCmd) -} - -type dockerBuildOptions struct { - ImageName string - Tag string - Platform string - Dockerfile string - NoCache bool -} - -type linuxKitBuildOptions struct { - OutputPath string - Format string - Template string -} - -func runPHPBuildDocker(ctx context.Context, projectDir string, opts dockerBuildOptions) error { - if !IsPHPProject(projectDir) { - return errors.New(i18n.T("cmd.php.error.not_php")) - } - - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.php")), i18n.T("cmd.php.build.building_docker")) - - // Show detected configuration - config, err := DetectDockerfileConfig(projectDir) - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.detect", "project configuration"), err) - } - - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.build.php_version")), config.PHPVersion) - cli.Print("%s %v\n", dimStyle.Render(i18n.T("cmd.php.build.laravel")), config.IsLaravel) - cli.Print("%s %v\n", dimStyle.Render(i18n.T("cmd.php.build.octane")), config.HasOctane) - cli.Print("%s %v\n", dimStyle.Render(i18n.T("cmd.php.build.frontend")), config.HasAssets) - if len(config.PHPExtensions) > 0 { - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.build.extensions")), strings.Join(config.PHPExtensions, ", ")) - } - cli.Blank() - - // Build options - buildOpts := DockerBuildOptions{ - ProjectDir: projectDir, - ImageName: opts.ImageName, - Tag: opts.Tag, - Platform: opts.Platform, - Dockerfile: opts.Dockerfile, - NoBuildCache: opts.NoCache, - Output: os.Stdout, - } - - if buildOpts.ImageName == "" { - buildOpts.ImageName = GetLaravelAppName(projectDir) - if buildOpts.ImageName == "" { - buildOpts.ImageName = "php-app" - } - // 
Sanitize for Docker - buildOpts.ImageName = strings.ToLower(strings.ReplaceAll(buildOpts.ImageName, " ", "-")) - } - - if buildOpts.Tag == "" { - buildOpts.Tag = "latest" - } - - cli.Print("%s %s:%s\n", dimStyle.Render(i18n.Label("image")), buildOpts.ImageName, buildOpts.Tag) - if opts.Platform != "" { - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.build.platform")), opts.Platform) - } - cli.Blank() - - if err := BuildDocker(ctx, buildOpts); err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.build"), err) - } - - cli.Print("\n%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("common.success.completed", map[string]any{"Action": "Docker image built"})) - cli.Print("%s docker run -p 80:80 -p 443:443 %s:%s\n", - dimStyle.Render(i18n.T("cmd.php.build.docker_run_with")), - buildOpts.ImageName, buildOpts.Tag) - - return nil -} - -func runPHPBuildLinuxKit(ctx context.Context, projectDir string, opts linuxKitBuildOptions) error { - if !IsPHPProject(projectDir) { - return errors.New(i18n.T("cmd.php.error.not_php")) - } - - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.php")), i18n.T("cmd.php.build.building_linuxkit")) - - buildOpts := LinuxKitBuildOptions{ - ProjectDir: projectDir, - OutputPath: opts.OutputPath, - Format: opts.Format, - Template: opts.Template, - Output: os.Stdout, - } - - if buildOpts.Format == "" { - buildOpts.Format = "qcow2" - } - if buildOpts.Template == "" { - buildOpts.Template = "server-php" - } - - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("template")), buildOpts.Template) - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.build.format")), buildOpts.Format) - cli.Blank() - - if err := BuildLinuxKit(ctx, buildOpts); err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.build"), err) - } - - cli.Print("\n%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("common.success.completed", map[string]any{"Action": "LinuxKit image built"})) - return nil -} - -var ( - serveImageName string - serveTag 
string - serveContainerName string - servePort int - serveHTTPSPort int - serveDetach bool - serveEnvFile string -) - -func addPHPServeCommand(parent *cobra.Command) { - serveCmd := &cobra.Command{ - Use: "serve", - Short: i18n.T("cmd.php.serve.short"), - Long: i18n.T("cmd.php.serve.long"), - RunE: func(cmd *cobra.Command, args []string) error { - imageName := serveImageName - if imageName == "" { - // Try to detect from current directory - cwd, err := os.Getwd() - if err == nil { - imageName = GetLaravelAppName(cwd) - if imageName != "" { - imageName = strings.ToLower(strings.ReplaceAll(imageName, " ", "-")) - } - } - if imageName == "" { - return errors.New(i18n.T("cmd.php.serve.name_required")) - } - } - - ctx := context.Background() - - opts := ServeOptions{ - ImageName: imageName, - Tag: serveTag, - ContainerName: serveContainerName, - Port: servePort, - HTTPSPort: serveHTTPSPort, - Detach: serveDetach, - EnvFile: serveEnvFile, - Output: os.Stdout, - } - - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.php")), i18n.ProgressSubject("run", "production container")) - cli.Print("%s %s:%s\n", dimStyle.Render(i18n.Label("image")), imageName, func() string { - if serveTag == "" { - return "latest" - } - return serveTag - }()) - - effectivePort := servePort - if effectivePort == 0 { - effectivePort = 80 - } - effectiveHTTPSPort := serveHTTPSPort - if effectiveHTTPSPort == 0 { - effectiveHTTPSPort = 443 - } - - cli.Print("%s http://localhost:%d, https://localhost:%d\n", - dimStyle.Render("Ports:"), effectivePort, effectiveHTTPSPort) - cli.Blank() - - if err := ServeProduction(ctx, opts); err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.start", "container"), err) - } - - if !serveDetach { - cli.Print("\n%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.php")), i18n.T("cmd.php.serve.stopped")) - } - - return nil - }, - } - - serveCmd.Flags().StringVar(&serveImageName, "name", "", i18n.T("cmd.php.serve.flag.name")) - 
serveCmd.Flags().StringVar(&serveTag, "tag", "", i18n.T("common.flag.tag")) - serveCmd.Flags().StringVar(&serveContainerName, "container", "", i18n.T("cmd.php.serve.flag.container")) - serveCmd.Flags().IntVar(&servePort, "port", 0, i18n.T("cmd.php.serve.flag.port")) - serveCmd.Flags().IntVar(&serveHTTPSPort, "https-port", 0, i18n.T("cmd.php.serve.flag.https_port")) - serveCmd.Flags().BoolVarP(&serveDetach, "detach", "d", false, i18n.T("cmd.php.serve.flag.detach")) - serveCmd.Flags().StringVar(&serveEnvFile, "env-file", "", i18n.T("cmd.php.serve.flag.env_file")) - - parent.AddCommand(serveCmd) -} - -func addPHPShellCommand(parent *cobra.Command) { - shellCmd := &cobra.Command{ - Use: "shell [container]", - Short: i18n.T("cmd.php.shell.short"), - Long: i18n.T("cmd.php.shell.long"), - Args: cobra.ExactArgs(1), - RunE: func(cmd *cobra.Command, args []string) error { - ctx := context.Background() - - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.php")), i18n.T("cmd.php.shell.opening", map[string]interface{}{"Container": args[0]})) - - if err := Shell(ctx, args[0]); err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.open", "shell"), err) - } - - return nil - }, - } - - parent.AddCommand(shellCmd) -} diff --git a/pkg/php/cmd_ci.go b/pkg/php/cmd_ci.go deleted file mode 100644 index 3550b12..0000000 --- a/pkg/php/cmd_ci.go +++ /dev/null @@ -1,562 +0,0 @@ -// cmd_ci.go implements the 'php ci' command for CI/CD pipeline integration. 
-// -// Usage: -// core php ci # Run full CI pipeline -// core php ci --json # Output combined JSON report -// core php ci --summary # Output markdown summary -// core php ci --sarif # Generate SARIF files -// core php ci --upload-sarif # Upload SARIF to GitHub Security -// core php ci --fail-on=high # Only fail on high+ severity - -package php - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - "time" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/spf13/cobra" -) - -// CI command flags -var ( - ciJSON bool - ciSummary bool - ciSARIF bool - ciUploadSARIF bool - ciFailOn string -) - -// CIResult represents the overall CI pipeline result -type CIResult struct { - Passed bool `json:"passed"` - ExitCode int `json:"exit_code"` - Duration string `json:"duration"` - StartedAt time.Time `json:"started_at"` - Checks []CICheckResult `json:"checks"` - Summary CISummary `json:"summary"` - Artifacts []string `json:"artifacts,omitempty"` -} - -// CICheckResult represents an individual check result -type CICheckResult struct { - Name string `json:"name"` - Status string `json:"status"` // passed, failed, warning, skipped - Duration string `json:"duration"` - Details string `json:"details,omitempty"` - Issues int `json:"issues,omitempty"` - Errors int `json:"errors,omitempty"` - Warnings int `json:"warnings,omitempty"` -} - -// CISummary contains aggregate statistics -type CISummary struct { - Total int `json:"total"` - Passed int `json:"passed"` - Failed int `json:"failed"` - Warnings int `json:"warnings"` - Skipped int `json:"skipped"` -} - -func addPHPCICommand(parent *cobra.Command) { - ciCmd := &cobra.Command{ - Use: "ci", - Short: i18n.T("cmd.php.ci.short"), - Long: i18n.T("cmd.php.ci.long"), - RunE: func(cmd *cobra.Command, args []string) error { - return runPHPCI() - }, - } - - ciCmd.Flags().BoolVar(&ciJSON, "json", false, i18n.T("cmd.php.ci.flag.json")) - 
ciCmd.Flags().BoolVar(&ciSummary, "summary", false, i18n.T("cmd.php.ci.flag.summary")) - ciCmd.Flags().BoolVar(&ciSARIF, "sarif", false, i18n.T("cmd.php.ci.flag.sarif")) - ciCmd.Flags().BoolVar(&ciUploadSARIF, "upload-sarif", false, i18n.T("cmd.php.ci.flag.upload_sarif")) - ciCmd.Flags().StringVar(&ciFailOn, "fail-on", "error", i18n.T("cmd.php.ci.flag.fail_on")) - - parent.AddCommand(ciCmd) -} - -func runPHPCI() error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - if !IsPHPProject(cwd) { - return errors.New(i18n.T("cmd.php.error.not_php")) - } - - startTime := time.Now() - ctx := context.Background() - - // Define checks to run in order - checks := []struct { - name string - run func(context.Context, string) (CICheckResult, error) - sarif bool // Whether this check can generate SARIF - }{ - {"test", runCITest, false}, - {"stan", runCIStan, true}, - {"psalm", runCIPsalm, true}, - {"fmt", runCIFmt, false}, - {"audit", runCIAudit, false}, - {"security", runCISecurity, false}, - } - - result := CIResult{ - StartedAt: startTime, - Passed: true, - Checks: make([]CICheckResult, 0, len(checks)), - } - - var artifacts []string - - // Print header unless JSON output - if !ciJSON { - cli.Print("\n%s\n", cli.BoldStyle.Render("core php ci - QA Pipeline")) - cli.Print("%s\n\n", strings.Repeat("─", 40)) - } - - // Run each check - for _, check := range checks { - if !ciJSON { - cli.Print(" %s %s...", dimStyle.Render("→"), check.name) - } - - checkResult, err := check.run(ctx, cwd) - if err != nil { - checkResult = CICheckResult{ - Name: check.name, - Status: "failed", - Details: err.Error(), - } - } - - result.Checks = append(result.Checks, checkResult) - - // Update summary - result.Summary.Total++ - switch checkResult.Status { - case "passed": - result.Summary.Passed++ - case "failed": - result.Summary.Failed++ - if shouldFailOn(checkResult, ciFailOn) { - result.Passed = false - } - case 
"warning": - result.Summary.Warnings++ - case "skipped": - result.Summary.Skipped++ - } - - // Print result - if !ciJSON { - cli.Print("\r %s %s %s\n", getStatusIcon(checkResult.Status), check.name, dimStyle.Render(checkResult.Details)) - } - - // Generate SARIF if requested - if (ciSARIF || ciUploadSARIF) && check.sarif { - sarifFile := filepath.Join(cwd, check.name+".sarif") - if generateSARIF(ctx, cwd, check.name, sarifFile) == nil { - artifacts = append(artifacts, sarifFile) - } - } - } - - result.Duration = time.Since(startTime).Round(time.Millisecond).String() - result.Artifacts = artifacts - - // Set exit code - if result.Passed { - result.ExitCode = 0 - } else { - result.ExitCode = 1 - } - - // Output based on flags - if ciJSON { - if err := outputCIJSON(result); err != nil { - return err - } - if !result.Passed { - os.Exit(result.ExitCode) - } - return nil - } - - if ciSummary { - if err := outputCISummary(result); err != nil { - return err - } - if !result.Passed { - return cli.Err("CI pipeline failed") - } - return nil - } - - // Default table output - cli.Print("\n%s\n", strings.Repeat("─", 40)) - - if result.Passed { - cli.Print("%s %s\n", successStyle.Render("✓ CI PASSED"), dimStyle.Render(result.Duration)) - } else { - cli.Print("%s %s\n", errorStyle.Render("✗ CI FAILED"), dimStyle.Render(result.Duration)) - } - - if len(artifacts) > 0 { - cli.Print("\n%s\n", dimStyle.Render("Artifacts:")) - for _, a := range artifacts { - cli.Print(" → %s\n", filepath.Base(a)) - } - } - - // Upload SARIF if requested - if ciUploadSARIF && len(artifacts) > 0 { - cli.Blank() - for _, sarifFile := range artifacts { - if err := uploadSARIFToGitHub(ctx, sarifFile); err != nil { - cli.Print(" %s %s: %s\n", errorStyle.Render("✗"), filepath.Base(sarifFile), err) - } else { - cli.Print(" %s %s uploaded\n", successStyle.Render("✓"), filepath.Base(sarifFile)) - } - } - } - - if !result.Passed { - return cli.Err("CI pipeline failed") - } - return nil -} - -// runCITest runs 
Pest/PHPUnit tests -func runCITest(ctx context.Context, dir string) (CICheckResult, error) { - start := time.Now() - result := CICheckResult{Name: "test", Status: "passed"} - - opts := TestOptions{ - Dir: dir, - Output: nil, // Suppress output - } - - if err := RunTests(ctx, opts); err != nil { - result.Status = "failed" - result.Details = err.Error() - } else { - result.Details = "all tests passed" - } - - result.Duration = time.Since(start).Round(time.Millisecond).String() - return result, nil -} - -// runCIStan runs PHPStan -func runCIStan(ctx context.Context, dir string) (CICheckResult, error) { - start := time.Now() - result := CICheckResult{Name: "stan", Status: "passed"} - - _, found := DetectAnalyser(dir) - if !found { - result.Status = "skipped" - result.Details = "PHPStan not configured" - return result, nil - } - - opts := AnalyseOptions{ - Dir: dir, - Output: nil, - } - - if err := Analyse(ctx, opts); err != nil { - result.Status = "failed" - result.Details = "errors found" - } else { - result.Details = "0 errors" - } - - result.Duration = time.Since(start).Round(time.Millisecond).String() - return result, nil -} - -// runCIPsalm runs Psalm -func runCIPsalm(ctx context.Context, dir string) (CICheckResult, error) { - start := time.Now() - result := CICheckResult{Name: "psalm", Status: "passed"} - - _, found := DetectPsalm(dir) - if !found { - result.Status = "skipped" - result.Details = "Psalm not configured" - return result, nil - } - - opts := PsalmOptions{ - Dir: dir, - Output: nil, - } - - if err := RunPsalm(ctx, opts); err != nil { - result.Status = "failed" - result.Details = "errors found" - } else { - result.Details = "0 errors" - } - - result.Duration = time.Since(start).Round(time.Millisecond).String() - return result, nil -} - -// runCIFmt checks code formatting -func runCIFmt(ctx context.Context, dir string) (CICheckResult, error) { - start := time.Now() - result := CICheckResult{Name: "fmt", Status: "passed"} - - _, found := 
DetectFormatter(dir) - if !found { - result.Status = "skipped" - result.Details = "no formatter configured" - return result, nil - } - - opts := FormatOptions{ - Dir: dir, - Fix: false, // Check only - Output: nil, - } - - if err := Format(ctx, opts); err != nil { - result.Status = "warning" - result.Details = "formatting issues" - } else { - result.Details = "code style OK" - } - - result.Duration = time.Since(start).Round(time.Millisecond).String() - return result, nil -} - -// runCIAudit runs composer audit -func runCIAudit(ctx context.Context, dir string) (CICheckResult, error) { - start := time.Now() - result := CICheckResult{Name: "audit", Status: "passed"} - - results, err := RunAudit(ctx, AuditOptions{ - Dir: dir, - Output: nil, - }) - if err != nil { - result.Status = "failed" - result.Details = err.Error() - result.Duration = time.Since(start).Round(time.Millisecond).String() - return result, nil - } - - totalVulns := 0 - for _, r := range results { - totalVulns += r.Vulnerabilities - } - - if totalVulns > 0 { - result.Status = "failed" - result.Details = fmt.Sprintf("%d vulnerabilities", totalVulns) - result.Issues = totalVulns - } else { - result.Details = "no vulnerabilities" - } - - result.Duration = time.Since(start).Round(time.Millisecond).String() - return result, nil -} - -// runCISecurity runs security checks -func runCISecurity(ctx context.Context, dir string) (CICheckResult, error) { - start := time.Now() - result := CICheckResult{Name: "security", Status: "passed"} - - secResult, err := RunSecurityChecks(ctx, SecurityOptions{ - Dir: dir, - Output: nil, - }) - if err != nil { - result.Status = "failed" - result.Details = err.Error() - result.Duration = time.Since(start).Round(time.Millisecond).String() - return result, nil - } - - if secResult.Summary.Critical > 0 || secResult.Summary.High > 0 { - result.Status = "failed" - result.Details = fmt.Sprintf("%d critical, %d high", secResult.Summary.Critical, secResult.Summary.High) - result.Issues = 
secResult.Summary.Critical + secResult.Summary.High - } else if secResult.Summary.Medium > 0 { - result.Status = "warning" - result.Details = fmt.Sprintf("%d medium issues", secResult.Summary.Medium) - result.Warnings = secResult.Summary.Medium - } else { - result.Details = "no issues" - } - - result.Duration = time.Since(start).Round(time.Millisecond).String() - return result, nil -} - -// shouldFailOn determines if a check should cause CI failure based on --fail-on -func shouldFailOn(check CICheckResult, level string) bool { - switch level { - case "critical": - return check.Status == "failed" && check.Issues > 0 - case "high", "error": - return check.Status == "failed" - case "warning": - return check.Status == "failed" || check.Status == "warning" - default: - return check.Status == "failed" - } -} - -// getStatusIcon returns the icon for a check status -func getStatusIcon(status string) string { - switch status { - case "passed": - return successStyle.Render("✓") - case "failed": - return errorStyle.Render("✗") - case "warning": - return phpQAWarningStyle.Render("⚠") - case "skipped": - return dimStyle.Render("-") - default: - return dimStyle.Render("?") - } -} - -// outputCIJSON outputs the result as JSON -func outputCIJSON(result CIResult) error { - data, err := json.MarshalIndent(result, "", " ") - if err != nil { - return err - } - fmt.Println(string(data)) - return nil -} - -// outputCISummary outputs a markdown summary -func outputCISummary(result CIResult) error { - var sb strings.Builder - - sb.WriteString("## CI Pipeline Results\n\n") - - if result.Passed { - sb.WriteString("**Status:** ✅ Passed\n\n") - } else { - sb.WriteString("**Status:** ❌ Failed\n\n") - } - - sb.WriteString("| Check | Status | Details |\n") - sb.WriteString("|-------|--------|----------|\n") - - for _, check := range result.Checks { - icon := "✅" - switch check.Status { - case "failed": - icon = "❌" - case "warning": - icon = "⚠️" - case "skipped": - icon = "⏭️" - } - 
sb.WriteString(fmt.Sprintf("| %s | %s | %s |\n", check.Name, icon, check.Details)) - } - - sb.WriteString(fmt.Sprintf("\n**Duration:** %s\n", result.Duration)) - - fmt.Print(sb.String()) - return nil -} - -// generateSARIF generates a SARIF file for a specific check -func generateSARIF(ctx context.Context, dir, checkName, outputFile string) error { - var args []string - - switch checkName { - case "stan": - args = []string{"vendor/bin/phpstan", "analyse", "--error-format=sarif", "--no-progress"} - case "psalm": - args = []string{"vendor/bin/psalm", "--output-format=sarif"} - default: - return fmt.Errorf("SARIF not supported for %s", checkName) - } - - cmd := exec.CommandContext(ctx, "php", args...) - cmd.Dir = dir - - // Capture output - command may exit non-zero when issues are found - // but still produce valid SARIF output - output, err := cmd.CombinedOutput() - if len(output) == 0 { - if err != nil { - return fmt.Errorf("failed to generate SARIF: %w", err) - } - return fmt.Errorf("no SARIF output generated") - } - - // Validate output is valid JSON - var js json.RawMessage - if err := json.Unmarshal(output, &js); err != nil { - return fmt.Errorf("invalid SARIF output: %w", err) - } - - return os.WriteFile(outputFile, output, 0644) -} - -// uploadSARIFToGitHub uploads a SARIF file to GitHub Security tab -func uploadSARIFToGitHub(ctx context.Context, sarifFile string) error { - // Validate commit SHA before calling API - sha := getGitSHA() - if sha == "" { - return errors.New("cannot upload SARIF: git commit SHA not available (ensure you're in a git repository)") - } - - // Use gh CLI to upload - cmd := exec.CommandContext(ctx, "gh", "api", - "repos/{owner}/{repo}/code-scanning/sarifs", - "-X", "POST", - "-F", "sarif=@"+sarifFile, - "-F", "ref="+getGitRef(), - "-F", "commit_sha="+sha, - ) - - if output, err := cmd.CombinedOutput(); err != nil { - return fmt.Errorf("%s: %s", err, string(output)) - } - return nil -} - -// getGitRef returns the current git ref -func 
getGitRef() string { - cmd := exec.Command("git", "symbolic-ref", "HEAD") - output, err := cmd.Output() - if err != nil { - return "refs/heads/main" - } - return strings.TrimSpace(string(output)) -} - -// getGitSHA returns the current git commit SHA -func getGitSHA() string { - cmd := exec.Command("git", "rev-parse", "HEAD") - output, err := cmd.Output() - if err != nil { - return "" - } - return strings.TrimSpace(string(output)) -} diff --git a/pkg/php/cmd_commands.go b/pkg/php/cmd_commands.go deleted file mode 100644 index c0a2444..0000000 --- a/pkg/php/cmd_commands.go +++ /dev/null @@ -1,41 +0,0 @@ -// Package php provides Laravel/PHP development and deployment commands. -// -// Development Commands: -// - dev: Start Laravel environment (FrankenPHP, Vite, Horizon, Reverb, Redis) -// - logs: Stream unified service logs -// - stop: Stop all running services -// - status: Show service status -// - ssl: Setup SSL certificates with mkcert -// -// Build Commands: -// - build: Build Docker or LinuxKit image -// - serve: Run production container -// - shell: Open shell in running container -// -// Code Quality: -// - test: Run PHPUnit/Pest tests -// - fmt: Format code with Laravel Pint -// - stan: Run PHPStan/Larastan static analysis -// - psalm: Run Psalm static analysis -// - audit: Security audit for dependencies -// - security: Security vulnerability scanning -// - qa: Run full QA pipeline -// - rector: Automated code refactoring -// - infection: Mutation testing for test quality -// -// Package Management: -// - packages link/unlink/update/list: Manage local Composer packages -// -// Deployment (Coolify): -// - deploy: Deploy to Coolify -// - deploy:status: Check deployment status -// - deploy:rollback: Rollback deployment -// - deploy:list: List recent deployments -package php - -import "github.com/spf13/cobra" - -// AddCommands registers the 'php' command and all subcommands. 
-func AddCommands(root *cobra.Command) { - AddPHPCommands(root) -} diff --git a/pkg/php/cmd_deploy.go b/pkg/php/cmd_deploy.go deleted file mode 100644 index 8df138a..0000000 --- a/pkg/php/cmd_deploy.go +++ /dev/null @@ -1,361 +0,0 @@ -package php - -import ( - "context" - "os" - "time" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/spf13/cobra" -) - -// Deploy command styles (aliases to shared) -var ( - phpDeployStyle = cli.SuccessStyle - phpDeployPendingStyle = cli.WarningStyle - phpDeployFailedStyle = cli.ErrorStyle -) - -func addPHPDeployCommands(parent *cobra.Command) { - // Main deploy command - addPHPDeployCommand(parent) - - // Deploy status subcommand (using colon notation: deploy:status) - addPHPDeployStatusCommand(parent) - - // Deploy rollback subcommand - addPHPDeployRollbackCommand(parent) - - // Deploy list subcommand - addPHPDeployListCommand(parent) -} - -var ( - deployStaging bool - deployForce bool - deployWait bool -) - -func addPHPDeployCommand(parent *cobra.Command) { - deployCmd := &cobra.Command{ - Use: "deploy", - Short: i18n.T("cmd.php.deploy.short"), - Long: i18n.T("cmd.php.deploy.long"), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - env := EnvProduction - if deployStaging { - env = EnvStaging - } - - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.deploy")), i18n.T("cmd.php.deploy.deploying", map[string]interface{}{"Environment": env})) - - ctx := context.Background() - - opts := DeployOptions{ - Dir: cwd, - Environment: env, - Force: deployForce, - Wait: deployWait, - } - - status, err := Deploy(ctx, opts) - if err != nil { - return cli.Err("%s: %w", i18n.T("cmd.php.error.deploy_failed"), err) - } - - printDeploymentStatus(status) - - if deployWait { - if IsDeploymentSuccessful(status.Status) { - cli.Print("\n%s %s\n", 
successStyle.Render(i18n.Label("done")), i18n.T("common.success.completed", map[string]any{"Action": "Deployment completed"})) - } else { - cli.Print("\n%s %s\n", errorStyle.Render(i18n.Label("warning")), i18n.T("cmd.php.deploy.warning_status", map[string]interface{}{"Status": status.Status})) - } - } else { - cli.Print("\n%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("cmd.php.deploy.triggered")) - } - - return nil - }, - } - - deployCmd.Flags().BoolVar(&deployStaging, "staging", false, i18n.T("cmd.php.deploy.flag.staging")) - deployCmd.Flags().BoolVar(&deployForce, "force", false, i18n.T("cmd.php.deploy.flag.force")) - deployCmd.Flags().BoolVar(&deployWait, "wait", false, i18n.T("cmd.php.deploy.flag.wait")) - - parent.AddCommand(deployCmd) -} - -var ( - deployStatusStaging bool - deployStatusDeploymentID string -) - -func addPHPDeployStatusCommand(parent *cobra.Command) { - statusCmd := &cobra.Command{ - Use: "deploy:status", - Short: i18n.T("cmd.php.deploy_status.short"), - Long: i18n.T("cmd.php.deploy_status.long"), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - env := EnvProduction - if deployStatusStaging { - env = EnvStaging - } - - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.deploy")), i18n.ProgressSubject("check", "deployment status")) - - ctx := context.Background() - - opts := StatusOptions{ - Dir: cwd, - Environment: env, - DeploymentID: deployStatusDeploymentID, - } - - status, err := DeployStatus(ctx, opts) - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "status"), err) - } - - printDeploymentStatus(status) - - return nil - }, - } - - statusCmd.Flags().BoolVar(&deployStatusStaging, "staging", false, i18n.T("cmd.php.deploy_status.flag.staging")) - statusCmd.Flags().StringVar(&deployStatusDeploymentID, "id", "", i18n.T("cmd.php.deploy_status.flag.id")) - - 
parent.AddCommand(statusCmd) -} - -var ( - rollbackStaging bool - rollbackDeploymentID string - rollbackWait bool -) - -func addPHPDeployRollbackCommand(parent *cobra.Command) { - rollbackCmd := &cobra.Command{ - Use: "deploy:rollback", - Short: i18n.T("cmd.php.deploy_rollback.short"), - Long: i18n.T("cmd.php.deploy_rollback.long"), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - env := EnvProduction - if rollbackStaging { - env = EnvStaging - } - - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.deploy")), i18n.T("cmd.php.deploy_rollback.rolling_back", map[string]interface{}{"Environment": env})) - - ctx := context.Background() - - opts := RollbackOptions{ - Dir: cwd, - Environment: env, - DeploymentID: rollbackDeploymentID, - Wait: rollbackWait, - } - - status, err := Rollback(ctx, opts) - if err != nil { - return cli.Err("%s: %w", i18n.T("cmd.php.error.rollback_failed"), err) - } - - printDeploymentStatus(status) - - if rollbackWait { - if IsDeploymentSuccessful(status.Status) { - cli.Print("\n%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("common.success.completed", map[string]any{"Action": "Rollback completed"})) - } else { - cli.Print("\n%s %s\n", errorStyle.Render(i18n.Label("warning")), i18n.T("cmd.php.deploy_rollback.warning_status", map[string]interface{}{"Status": status.Status})) - } - } else { - cli.Print("\n%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("cmd.php.deploy_rollback.triggered")) - } - - return nil - }, - } - - rollbackCmd.Flags().BoolVar(&rollbackStaging, "staging", false, i18n.T("cmd.php.deploy_rollback.flag.staging")) - rollbackCmd.Flags().StringVar(&rollbackDeploymentID, "id", "", i18n.T("cmd.php.deploy_rollback.flag.id")) - rollbackCmd.Flags().BoolVar(&rollbackWait, "wait", false, i18n.T("cmd.php.deploy_rollback.flag.wait")) - - parent.AddCommand(rollbackCmd) -} - 
-var ( - deployListStaging bool - deployListLimit int -) - -func addPHPDeployListCommand(parent *cobra.Command) { - listCmd := &cobra.Command{ - Use: "deploy:list", - Short: i18n.T("cmd.php.deploy_list.short"), - Long: i18n.T("cmd.php.deploy_list.long"), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - env := EnvProduction - if deployListStaging { - env = EnvStaging - } - - limit := deployListLimit - if limit == 0 { - limit = 10 - } - - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.deploy")), i18n.T("cmd.php.deploy_list.recent", map[string]interface{}{"Environment": env})) - - ctx := context.Background() - - deployments, err := ListDeployments(ctx, cwd, env, limit) - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.list", "deployments"), err) - } - - if len(deployments) == 0 { - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.info")), i18n.T("cmd.php.deploy_list.none_found")) - return nil - } - - for i, d := range deployments { - printDeploymentSummary(i+1, &d) - } - - return nil - }, - } - - listCmd.Flags().BoolVar(&deployListStaging, "staging", false, i18n.T("cmd.php.deploy_list.flag.staging")) - listCmd.Flags().IntVar(&deployListLimit, "limit", 0, i18n.T("cmd.php.deploy_list.flag.limit")) - - parent.AddCommand(listCmd) -} - -func printDeploymentStatus(status *DeploymentStatus) { - // Status with color - statusStyle := phpDeployStyle - switch status.Status { - case "queued", "building", "deploying", "pending", "rolling_back": - statusStyle = phpDeployPendingStyle - case "failed", "error", "cancelled": - statusStyle = phpDeployFailedStyle - } - - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("status")), statusStyle.Render(status.Status)) - - if status.ID != "" { - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.id")), status.ID) - } - - if status.URL != "" { - cli.Print("%s %s\n", 
dimStyle.Render(i18n.Label("url")), linkStyle.Render(status.URL)) - } - - if status.Branch != "" { - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.branch")), status.Branch) - } - - if status.Commit != "" { - commit := status.Commit - if len(commit) > 7 { - commit = commit[:7] - } - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.commit")), commit) - if status.CommitMessage != "" { - // Truncate long messages - msg := status.CommitMessage - if len(msg) > 60 { - msg = msg[:57] + "..." - } - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.message")), msg) - } - } - - if !status.StartedAt.IsZero() { - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("started")), status.StartedAt.Format(time.RFC3339)) - } - - if !status.CompletedAt.IsZero() { - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.completed")), status.CompletedAt.Format(time.RFC3339)) - if !status.StartedAt.IsZero() { - duration := status.CompletedAt.Sub(status.StartedAt) - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.duration")), duration.Round(time.Second)) - } - } -} - -func printDeploymentSummary(index int, status *DeploymentStatus) { - // Status with color - statusStyle := phpDeployStyle - switch status.Status { - case "queued", "building", "deploying", "pending", "rolling_back": - statusStyle = phpDeployPendingStyle - case "failed", "error", "cancelled": - statusStyle = phpDeployFailedStyle - } - - // Format: #1 [finished] abc1234 - commit message (2 hours ago) - id := status.ID - if len(id) > 8 { - id = id[:8] - } - - commit := status.Commit - if len(commit) > 7 { - commit = commit[:7] - } - - msg := status.CommitMessage - if len(msg) > 40 { - msg = msg[:37] + "..." 
- } - - age := "" - if !status.StartedAt.IsZero() { - age = i18n.TimeAgo(status.StartedAt) - } - - cli.Print(" %s %s %s", - dimStyle.Render(cli.Sprintf("#%d", index)), - statusStyle.Render(cli.Sprintf("[%s]", status.Status)), - id, - ) - - if commit != "" { - cli.Print(" %s", commit) - } - - if msg != "" { - cli.Print(" - %s", msg) - } - - if age != "" { - cli.Print(" %s", dimStyle.Render(cli.Sprintf("(%s)", age))) - } - - cli.Blank() -} diff --git a/pkg/php/cmd_dev.go b/pkg/php/cmd_dev.go deleted file mode 100644 index ebca16d..0000000 --- a/pkg/php/cmd_dev.go +++ /dev/null @@ -1,498 +0,0 @@ -package php - -import ( - "bufio" - "context" - "errors" - "os" - "os/signal" - "strings" - "syscall" - "time" - - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/spf13/cobra" -) - -var ( - devNoVite bool - devNoHorizon bool - devNoReverb bool - devNoRedis bool - devHTTPS bool - devDomain string - devPort int -) - -func addPHPDevCommand(parent *cobra.Command) { - devCmd := &cobra.Command{ - Use: "dev", - Short: i18n.T("cmd.php.dev.short"), - Long: i18n.T("cmd.php.dev.long"), - RunE: func(cmd *cobra.Command, args []string) error { - return runPHPDev(phpDevOptions{ - NoVite: devNoVite, - NoHorizon: devNoHorizon, - NoReverb: devNoReverb, - NoRedis: devNoRedis, - HTTPS: devHTTPS, - Domain: devDomain, - Port: devPort, - }) - }, - } - - devCmd.Flags().BoolVar(&devNoVite, "no-vite", false, i18n.T("cmd.php.dev.flag.no_vite")) - devCmd.Flags().BoolVar(&devNoHorizon, "no-horizon", false, i18n.T("cmd.php.dev.flag.no_horizon")) - devCmd.Flags().BoolVar(&devNoReverb, "no-reverb", false, i18n.T("cmd.php.dev.flag.no_reverb")) - devCmd.Flags().BoolVar(&devNoRedis, "no-redis", false, i18n.T("cmd.php.dev.flag.no_redis")) - devCmd.Flags().BoolVar(&devHTTPS, "https", false, i18n.T("cmd.php.dev.flag.https")) - devCmd.Flags().StringVar(&devDomain, "domain", "", i18n.T("cmd.php.dev.flag.domain")) - devCmd.Flags().IntVar(&devPort, "port", 0, 
i18n.T("cmd.php.dev.flag.port")) - - parent.AddCommand(devCmd) -} - -type phpDevOptions struct { - NoVite bool - NoHorizon bool - NoReverb bool - NoRedis bool - HTTPS bool - Domain string - Port int -} - -func runPHPDev(opts phpDevOptions) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("failed to get working directory: %w", err) - } - - // Check if this is a Laravel project - if !IsLaravelProject(cwd) { - return errors.New(i18n.T("cmd.php.error.not_laravel")) - } - - // Get app name for display - appName := GetLaravelAppName(cwd) - if appName == "" { - appName = "Laravel" - } - - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.php")), i18n.T("cmd.php.dev.starting", map[string]interface{}{"AppName": appName})) - - // Detect services - services := DetectServices(cwd) - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.services")), i18n.T("cmd.php.dev.detected_services")) - for _, svc := range services { - cli.Print(" %s %s\n", successStyle.Render("*"), svc) - } - cli.Blank() - - // Setup options - port := opts.Port - if port == 0 { - port = 8000 - } - - devOpts := Options{ - Dir: cwd, - NoVite: opts.NoVite, - NoHorizon: opts.NoHorizon, - NoReverb: opts.NoReverb, - NoRedis: opts.NoRedis, - HTTPS: opts.HTTPS, - Domain: opts.Domain, - FrankenPHPPort: port, - } - - // Create and start dev server - server := NewDevServer(devOpts) - - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - // Handle shutdown signals - sigCh := make(chan os.Signal, 1) - signal.Notify(sigCh, syscall.SIGINT, syscall.SIGTERM) - - go func() { - <-sigCh - cli.Print("\n%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.php")), i18n.T("cmd.php.dev.shutting_down")) - cancel() - }() - - if err := server.Start(ctx, devOpts); err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.start", "services"), err) - } - - // Print status - cli.Print("%s %s\n", successStyle.Render(i18n.T("cmd.php.label.running")), 
i18n.T("cmd.php.dev.services_started")) - printServiceStatuses(server.Status()) - cli.Blank() - - // Print URLs - appURL := GetLaravelAppURL(cwd) - if appURL == "" { - if opts.HTTPS { - appURL = cli.Sprintf("https://localhost:%d", port) - } else { - appURL = cli.Sprintf("http://localhost:%d", port) - } - } - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.app_url")), linkStyle.Render(appURL)) - - // Check for Vite - if !opts.NoVite && containsService(services, ServiceVite) { - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.vite")), linkStyle.Render("http://localhost:5173")) - } - - cli.Print("\n%s\n\n", dimStyle.Render(i18n.T("cmd.php.dev.press_ctrl_c"))) - - // Stream unified logs - logsReader, err := server.Logs("", true) - if err != nil { - cli.Print("%s %s\n", errorStyle.Render(i18n.Label("warning")), i18n.T("i18n.fail.get", "logs")) - } else { - defer logsReader.Close() - - scanner := bufio.NewScanner(logsReader) - for scanner.Scan() { - select { - case <-ctx.Done(): - goto shutdown - default: - line := scanner.Text() - printColoredLog(line) - } - } - } - -shutdown: - // Stop services - if err := server.Stop(); err != nil { - cli.Print("%s %s\n", errorStyle.Render(i18n.Label("error")), i18n.T("cmd.php.dev.stop_error", map[string]interface{}{"Error": err})) - } - - cli.Print("%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("cmd.php.dev.all_stopped")) - return nil -} - -var ( - logsFollow bool - logsService string -) - -func addPHPLogsCommand(parent *cobra.Command) { - logsCmd := &cobra.Command{ - Use: "logs", - Short: i18n.T("cmd.php.logs.short"), - Long: i18n.T("cmd.php.logs.long"), - RunE: func(cmd *cobra.Command, args []string) error { - return runPHPLogs(logsService, logsFollow) - }, - } - - logsCmd.Flags().BoolVar(&logsFollow, "follow", false, i18n.T("common.flag.follow")) - logsCmd.Flags().StringVar(&logsService, "service", "", i18n.T("cmd.php.logs.flag.service")) - - parent.AddCommand(logsCmd) -} - -func 
runPHPLogs(service string, follow bool) error { - cwd, err := os.Getwd() - if err != nil { - return err - } - - if !IsLaravelProject(cwd) { - return errors.New(i18n.T("cmd.php.error.not_laravel_short")) - } - - // Create a minimal server just to access logs - server := NewDevServer(Options{Dir: cwd}) - - logsReader, err := server.Logs(service, follow) - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "logs"), err) - } - defer logsReader.Close() - - // Handle interrupt - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - sigCh := make(chan os.Signal, 1) - signal.Notify(sigCh, syscall.SIGINT, syscall.SIGTERM) - - go func() { - <-sigCh - cancel() - }() - - scanner := bufio.NewScanner(logsReader) - for scanner.Scan() { - select { - case <-ctx.Done(): - return nil - default: - printColoredLog(scanner.Text()) - } - } - - return scanner.Err() -} - -func addPHPStopCommand(parent *cobra.Command) { - stopCmd := &cobra.Command{ - Use: "stop", - Short: i18n.T("cmd.php.stop.short"), - RunE: func(cmd *cobra.Command, args []string) error { - return runPHPStop() - }, - } - - parent.AddCommand(stopCmd) -} - -func runPHPStop() error { - cwd, err := os.Getwd() - if err != nil { - return err - } - - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.php")), i18n.T("cmd.php.stop.stopping")) - - // We need to find running processes - // This is a simplified version - in practice you'd want to track PIDs - server := NewDevServer(Options{Dir: cwd}) - if err := server.Stop(); err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.stop", "services"), err) - } - - cli.Print("%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("cmd.php.dev.all_stopped")) - return nil -} - -func addPHPStatusCommand(parent *cobra.Command) { - statusCmd := &cobra.Command{ - Use: "status", - Short: i18n.T("cmd.php.status.short"), - RunE: func(cmd *cobra.Command, args []string) error { - return runPHPStatus() - }, - } - - parent.AddCommand(statusCmd) -} 
- -func runPHPStatus() error { - cwd, err := os.Getwd() - if err != nil { - return err - } - - if !IsLaravelProject(cwd) { - return errors.New(i18n.T("cmd.php.error.not_laravel_short")) - } - - appName := GetLaravelAppName(cwd) - if appName == "" { - appName = "Laravel" - } - - cli.Print("%s %s\n\n", dimStyle.Render(i18n.Label("project")), appName) - - // Detect available services - services := DetectServices(cwd) - cli.Print("%s\n", dimStyle.Render(i18n.T("cmd.php.status.detected_services"))) - for _, svc := range services { - style := getServiceStyle(string(svc)) - cli.Print(" %s %s\n", style.Render("*"), svc) - } - cli.Blank() - - // Package manager - pm := DetectPackageManager(cwd) - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.status.package_manager")), pm) - - // FrankenPHP status - if IsFrankenPHPProject(cwd) { - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.status.octane_server")), "FrankenPHP") - } - - // SSL status - appURL := GetLaravelAppURL(cwd) - if appURL != "" { - domain := ExtractDomainFromURL(appURL) - if CertsExist(domain, SSLOptions{}) { - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.status.ssl_certs")), successStyle.Render(i18n.T("cmd.php.status.ssl_installed"))) - } else { - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.status.ssl_certs")), dimStyle.Render(i18n.T("cmd.php.status.ssl_not_setup"))) - } - } - - return nil -} - -var sslDomain string - -func addPHPSSLCommand(parent *cobra.Command) { - sslCmd := &cobra.Command{ - Use: "ssl", - Short: i18n.T("cmd.php.ssl.short"), - RunE: func(cmd *cobra.Command, args []string) error { - return runPHPSSL(sslDomain) - }, - } - - sslCmd.Flags().StringVar(&sslDomain, "domain", "", i18n.T("cmd.php.ssl.flag.domain")) - - parent.AddCommand(sslCmd) -} - -func runPHPSSL(domain string) error { - cwd, err := os.Getwd() - if err != nil { - return err - } - - // Get domain from APP_URL if not specified - if domain == "" { - appURL := GetLaravelAppURL(cwd) - if appURL != "" { - domain 
= ExtractDomainFromURL(appURL) - } - } - if domain == "" { - domain = "localhost" - } - - // Check if mkcert is installed - if !IsMkcertInstalled() { - cli.Print("%s %s\n", errorStyle.Render(i18n.Label("error")), i18n.T("cmd.php.ssl.mkcert_not_installed")) - cli.Print("\n%s\n", i18n.T("common.hint.install_with")) - cli.Print(" %s\n", i18n.T("cmd.php.ssl.install_macos")) - cli.Print(" %s\n", i18n.T("cmd.php.ssl.install_linux")) - return errors.New(i18n.T("cmd.php.error.mkcert_not_installed")) - } - - cli.Print("%s %s\n", dimStyle.Render("SSL:"), i18n.T("cmd.php.ssl.setting_up", map[string]interface{}{"Domain": domain})) - - // Check if certs already exist - if CertsExist(domain, SSLOptions{}) { - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("skip")), i18n.T("cmd.php.ssl.certs_exist")) - - certFile, keyFile, _ := CertPaths(domain, SSLOptions{}) - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.ssl.cert_label")), certFile) - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.ssl.key_label")), keyFile) - return nil - } - - // Setup SSL - if err := SetupSSL(domain, SSLOptions{}); err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.setup", "SSL"), err) - } - - certFile, keyFile, _ := CertPaths(domain, SSLOptions{}) - - cli.Print("%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("cmd.php.ssl.certs_created")) - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.ssl.cert_label")), certFile) - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.ssl.key_label")), keyFile) - - return nil -} - -// Helper functions for dev commands - -func printServiceStatuses(statuses []ServiceStatus) { - for _, s := range statuses { - style := getServiceStyle(s.Name) - var statusText string - - if s.Error != nil { - statusText = phpStatusError.Render(i18n.T("cmd.php.status.error", map[string]interface{}{"Error": s.Error})) - } else if s.Running { - statusText = phpStatusRunning.Render(i18n.T("cmd.php.status.running")) - if s.Port > 0 { - statusText += 
dimStyle.Render(cli.Sprintf(" (%s)", i18n.T("cmd.php.status.port", map[string]interface{}{"Port": s.Port}))) - } - if s.PID > 0 { - statusText += dimStyle.Render(cli.Sprintf(" [%s]", i18n.T("cmd.php.status.pid", map[string]interface{}{"PID": s.PID}))) - } - } else { - statusText = phpStatusStopped.Render(i18n.T("cmd.php.status.stopped")) - } - - cli.Print(" %s %s\n", style.Render(s.Name+":"), statusText) - } -} - -func printColoredLog(line string) { - // Parse service prefix from log line - timestamp := time.Now().Format("15:04:05") - - var style *cli.AnsiStyle - serviceName := "" - - if strings.HasPrefix(line, "[FrankenPHP]") { - style = phpFrankenPHPStyle - serviceName = "FrankenPHP" - line = strings.TrimPrefix(line, "[FrankenPHP] ") - } else if strings.HasPrefix(line, "[Vite]") { - style = phpViteStyle - serviceName = "Vite" - line = strings.TrimPrefix(line, "[Vite] ") - } else if strings.HasPrefix(line, "[Horizon]") { - style = phpHorizonStyle - serviceName = "Horizon" - line = strings.TrimPrefix(line, "[Horizon] ") - } else if strings.HasPrefix(line, "[Reverb]") { - style = phpReverbStyle - serviceName = "Reverb" - line = strings.TrimPrefix(line, "[Reverb] ") - } else if strings.HasPrefix(line, "[Redis]") { - style = phpRedisStyle - serviceName = "Redis" - line = strings.TrimPrefix(line, "[Redis] ") - } else { - // Unknown service, print as-is - cli.Print("%s %s\n", dimStyle.Render(timestamp), line) - return - } - - cli.Print("%s %s %s\n", - dimStyle.Render(timestamp), - style.Render(cli.Sprintf("[%s]", serviceName)), - line, - ) -} - -func getServiceStyle(name string) *cli.AnsiStyle { - switch strings.ToLower(name) { - case "frankenphp": - return phpFrankenPHPStyle - case "vite": - return phpViteStyle - case "horizon": - return phpHorizonStyle - case "reverb": - return phpReverbStyle - case "redis": - return phpRedisStyle - default: - return dimStyle - } -} - -func containsService(services []DetectedService, target DetectedService) bool { - for _, s := range 
services { - if s == target { - return true - } - } - return false -} diff --git a/pkg/php/cmd_packages.go b/pkg/php/cmd_packages.go deleted file mode 100644 index d8e8793..0000000 --- a/pkg/php/cmd_packages.go +++ /dev/null @@ -1,146 +0,0 @@ -package php - -import ( - "os" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/spf13/cobra" -) - -func addPHPPackagesCommands(parent *cobra.Command) { - packagesCmd := &cobra.Command{ - Use: "packages", - Short: i18n.T("cmd.php.packages.short"), - Long: i18n.T("cmd.php.packages.long"), - } - parent.AddCommand(packagesCmd) - - addPHPPackagesLinkCommand(packagesCmd) - addPHPPackagesUnlinkCommand(packagesCmd) - addPHPPackagesUpdateCommand(packagesCmd) - addPHPPackagesListCommand(packagesCmd) -} - -func addPHPPackagesLinkCommand(parent *cobra.Command) { - linkCmd := &cobra.Command{ - Use: "link [paths...]", - Short: i18n.T("cmd.php.packages.link.short"), - Long: i18n.T("cmd.php.packages.link.long"), - Args: cobra.MinimumNArgs(1), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.php")), i18n.T("cmd.php.packages.link.linking")) - - if err := LinkPackages(cwd, args); err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.link", "packages"), err) - } - - cli.Print("\n%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("cmd.php.packages.link.done")) - return nil - }, - } - - parent.AddCommand(linkCmd) -} - -func addPHPPackagesUnlinkCommand(parent *cobra.Command) { - unlinkCmd := &cobra.Command{ - Use: "unlink [packages...]", - Short: i18n.T("cmd.php.packages.unlink.short"), - Long: i18n.T("cmd.php.packages.unlink.long"), - Args: cobra.MinimumNArgs(1), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", 
i18n.T("i18n.fail.get", "working directory"), err) - } - - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.php")), i18n.T("cmd.php.packages.unlink.unlinking")) - - if err := UnlinkPackages(cwd, args); err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.unlink", "packages"), err) - } - - cli.Print("\n%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("cmd.php.packages.unlink.done")) - return nil - }, - } - - parent.AddCommand(unlinkCmd) -} - -func addPHPPackagesUpdateCommand(parent *cobra.Command) { - updateCmd := &cobra.Command{ - Use: "update [packages...]", - Short: i18n.T("cmd.php.packages.update.short"), - Long: i18n.T("cmd.php.packages.update.long"), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.php")), i18n.T("cmd.php.packages.update.updating")) - - if err := UpdatePackages(cwd, args); err != nil { - return cli.Err("%s: %w", i18n.T("cmd.php.error.update_packages"), err) - } - - cli.Print("\n%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("cmd.php.packages.update.done")) - return nil - }, - } - - parent.AddCommand(updateCmd) -} - -func addPHPPackagesListCommand(parent *cobra.Command) { - listCmd := &cobra.Command{ - Use: "list", - Short: i18n.T("cmd.php.packages.list.short"), - Long: i18n.T("cmd.php.packages.list.long"), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - packages, err := ListLinkedPackages(cwd) - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.list", "packages"), err) - } - - if len(packages) == 0 { - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.php")), i18n.T("cmd.php.packages.list.none_found")) - return nil - } - - cli.Print("%s %s\n\n", 
dimStyle.Render(i18n.T("cmd.php.label.php")), i18n.T("cmd.php.packages.list.linked")) - - for _, pkg := range packages { - name := pkg.Name - if name == "" { - name = i18n.T("cmd.php.packages.list.unknown") - } - version := pkg.Version - if version == "" { - version = "dev" - } - - cli.Print(" %s %s\n", successStyle.Render("*"), name) - cli.Print(" %s %s\n", dimStyle.Render(i18n.Label("path")), pkg.Path) - cli.Print(" %s %s\n", dimStyle.Render(i18n.Label("version")), version) - cli.Blank() - } - - return nil - }, - } - - parent.AddCommand(listCmd) -} diff --git a/pkg/php/cmd_qa_runner.go b/pkg/php/cmd_qa_runner.go deleted file mode 100644 index 9d8c8ce..0000000 --- a/pkg/php/cmd_qa_runner.go +++ /dev/null @@ -1,338 +0,0 @@ -package php - -import ( - "context" - "os" - "path/filepath" - "strings" - "sync" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/framework" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/process" -) - -// QARunner orchestrates PHP QA checks using pkg/process. -type QARunner struct { - dir string - fix bool - service *process.Service - core *framework.Core - - // Output tracking - outputMu sync.Mutex - checkOutputs map[string][]string -} - -// NewQARunner creates a QA runner for the given directory. -func NewQARunner(dir string, fix bool) (*QARunner, error) { - // Create a Core with process service for the QA session - core, err := framework.New( - framework.WithName("process", process.NewService(process.Options{})), - ) - if err != nil { - return nil, cli.WrapVerb(err, "create", "process service") - } - - svc, err := framework.ServiceFor[*process.Service](core, "process") - if err != nil { - return nil, cli.WrapVerb(err, "get", "process service") - } - - runner := &QARunner{ - dir: dir, - fix: fix, - service: svc, - core: core, - checkOutputs: make(map[string][]string), - } - - return runner, nil -} - -// BuildSpecs creates RunSpecs for the given QA checks. 
-func (r *QARunner) BuildSpecs(checks []string) []process.RunSpec { - specs := make([]process.RunSpec, 0, len(checks)) - - for _, check := range checks { - spec := r.buildSpec(check) - if spec != nil { - specs = append(specs, *spec) - } - } - - return specs -} - -// buildSpec creates a RunSpec for a single check. -func (r *QARunner) buildSpec(check string) *process.RunSpec { - switch check { - case "audit": - return &process.RunSpec{ - Name: "audit", - Command: "composer", - Args: []string{"audit", "--format=summary"}, - Dir: r.dir, - } - - case "fmt": - formatter, found := DetectFormatter(r.dir) - if !found { - return nil - } - if formatter == FormatterPint { - vendorBin := filepath.Join(r.dir, "vendor", "bin", "pint") - cmd := "pint" - if _, err := os.Stat(vendorBin); err == nil { - cmd = vendorBin - } - args := []string{} - if !r.fix { - args = append(args, "--test") - } - return &process.RunSpec{ - Name: "fmt", - Command: cmd, - Args: args, - Dir: r.dir, - After: []string{"audit"}, - } - } - return nil - - case "stan": - _, found := DetectAnalyser(r.dir) - if !found { - return nil - } - vendorBin := filepath.Join(r.dir, "vendor", "bin", "phpstan") - cmd := "phpstan" - if _, err := os.Stat(vendorBin); err == nil { - cmd = vendorBin - } - return &process.RunSpec{ - Name: "stan", - Command: cmd, - Args: []string{"analyse", "--no-progress"}, - Dir: r.dir, - After: []string{"fmt"}, - } - - case "psalm": - _, found := DetectPsalm(r.dir) - if !found { - return nil - } - vendorBin := filepath.Join(r.dir, "vendor", "bin", "psalm") - cmd := "psalm" - if _, err := os.Stat(vendorBin); err == nil { - cmd = vendorBin - } - args := []string{"--no-progress"} - if r.fix { - args = append(args, "--alter", "--issues=all") - } - return &process.RunSpec{ - Name: "psalm", - Command: cmd, - Args: args, - Dir: r.dir, - After: []string{"stan"}, - } - - case "test": - // Check for Pest first, fall back to PHPUnit - pestBin := filepath.Join(r.dir, "vendor", "bin", "pest") - phpunitBin := 
filepath.Join(r.dir, "vendor", "bin", "phpunit") - - cmd := "pest" - if _, err := os.Stat(pestBin); err == nil { - cmd = pestBin - } else if _, err := os.Stat(phpunitBin); err == nil { - cmd = phpunitBin - } else { - return nil - } - - // Tests depend on stan (or psalm if available) - after := []string{"stan"} - if _, found := DetectPsalm(r.dir); found { - after = []string{"psalm"} - } - - return &process.RunSpec{ - Name: "test", - Command: cmd, - Args: []string{}, - Dir: r.dir, - After: after, - } - - case "rector": - if !DetectRector(r.dir) { - return nil - } - vendorBin := filepath.Join(r.dir, "vendor", "bin", "rector") - cmd := "rector" - if _, err := os.Stat(vendorBin); err == nil { - cmd = vendorBin - } - args := []string{"process"} - if !r.fix { - args = append(args, "--dry-run") - } - return &process.RunSpec{ - Name: "rector", - Command: cmd, - Args: args, - Dir: r.dir, - After: []string{"test"}, - AllowFailure: true, // Dry-run returns non-zero if changes would be made - } - - case "infection": - if !DetectInfection(r.dir) { - return nil - } - vendorBin := filepath.Join(r.dir, "vendor", "bin", "infection") - cmd := "infection" - if _, err := os.Stat(vendorBin); err == nil { - cmd = vendorBin - } - return &process.RunSpec{ - Name: "infection", - Command: cmd, - Args: []string{"--min-msi=50", "--min-covered-msi=70", "--threads=4"}, - Dir: r.dir, - After: []string{"test"}, - AllowFailure: true, - } - } - - return nil -} - -// Run executes all QA checks and returns the results. -func (r *QARunner) Run(ctx context.Context, stages []QAStage) (*QARunResult, error) { - // Collect all checks from all stages - var allChecks []string - for _, stage := range stages { - checks := GetQAChecks(r.dir, stage) - allChecks = append(allChecks, checks...) 
- } - - if len(allChecks) == 0 { - return &QARunResult{Passed: true}, nil - } - - // Build specs - specs := r.BuildSpecs(allChecks) - if len(specs) == 0 { - return &QARunResult{Passed: true}, nil - } - - // Register output handler - r.core.RegisterAction(func(c *framework.Core, msg framework.Message) error { - switch m := msg.(type) { - case process.ActionProcessOutput: - r.outputMu.Lock() - // Extract check name from process ID mapping - for _, spec := range specs { - if strings.Contains(m.ID, spec.Name) || m.ID != "" { - // Store output for later display if needed - r.checkOutputs[spec.Name] = append(r.checkOutputs[spec.Name], m.Line) - break - } - } - r.outputMu.Unlock() - } - return nil - }) - - // Create runner and execute - runner := process.NewRunner(r.service) - result, err := runner.RunAll(ctx, specs) - if err != nil { - return nil, err - } - - // Convert to QA result - qaResult := &QARunResult{ - Passed: result.Success(), - Duration: result.Duration.String(), - Results: make([]QACheckRunResult, 0, len(result.Results)), - } - - for _, res := range result.Results { - qaResult.Results = append(qaResult.Results, QACheckRunResult{ - Name: res.Name, - Passed: res.Passed(), - Skipped: res.Skipped, - ExitCode: res.ExitCode, - Duration: res.Duration.String(), - Output: res.Output, - }) - if res.Passed() { - qaResult.PassedCount++ - } else if res.Skipped { - qaResult.SkippedCount++ - } else { - qaResult.FailedCount++ - } - } - - return qaResult, nil -} - -// GetCheckOutput returns captured output for a check. -func (r *QARunner) GetCheckOutput(check string) []string { - r.outputMu.Lock() - defer r.outputMu.Unlock() - return r.checkOutputs[check] -} - -// QARunResult holds the results of running QA checks. -type QARunResult struct { - Passed bool - Duration string - Results []QACheckRunResult - PassedCount int - FailedCount int - SkippedCount int -} - -// QACheckRunResult holds the result of a single QA check. 
-type QACheckRunResult struct { - Name string - Passed bool - Skipped bool - ExitCode int - Duration string - Output string -} - -// GetIssueMessage returns an issue message for a check. -func (r QACheckRunResult) GetIssueMessage() string { - if r.Passed || r.Skipped { - return "" - } - switch r.Name { - case "audit": - return i18n.T("i18n.done.find", "vulnerabilities") - case "fmt": - return i18n.T("i18n.done.find", "style issues") - case "stan": - return i18n.T("i18n.done.find", "analysis errors") - case "psalm": - return i18n.T("i18n.done.find", "type errors") - case "test": - return i18n.T("i18n.done.fail", "tests") - case "rector": - return i18n.T("i18n.done.find", "refactoring suggestions") - case "infection": - return i18n.T("i18n.fail.pass", "mutation testing") - default: - return i18n.T("i18n.done.find", "issues") - } -} diff --git a/pkg/php/cmd_quality.go b/pkg/php/cmd_quality.go deleted file mode 100644 index 0febf46..0000000 --- a/pkg/php/cmd_quality.go +++ /dev/null @@ -1,756 +0,0 @@ -package php - -import ( - "context" - "errors" - "os" - "strings" - - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/spf13/cobra" -) - -var ( - testParallel bool - testCoverage bool - testFilter string - testGroup string -) - -func addPHPTestCommand(parent *cobra.Command) { - testCmd := &cobra.Command{ - Use: "test", - Short: i18n.T("cmd.php.test.short"), - Long: i18n.T("cmd.php.test.long"), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - if !IsPHPProject(cwd) { - return errors.New(i18n.T("cmd.php.error.not_php")) - } - - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.php")), i18n.ProgressSubject("run", "tests")) - - ctx := context.Background() - - opts := TestOptions{ - Dir: cwd, - Filter: testFilter, - Parallel: testParallel, - Coverage: testCoverage, - Output: os.Stdout, - } - - if 
testGroup != "" { - opts.Groups = []string{testGroup} - } - - if err := RunTests(ctx, opts); err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.run", "tests"), err) - } - - return nil - }, - } - - testCmd.Flags().BoolVar(&testParallel, "parallel", false, i18n.T("cmd.php.test.flag.parallel")) - testCmd.Flags().BoolVar(&testCoverage, "coverage", false, i18n.T("cmd.php.test.flag.coverage")) - testCmd.Flags().StringVar(&testFilter, "filter", "", i18n.T("cmd.php.test.flag.filter")) - testCmd.Flags().StringVar(&testGroup, "group", "", i18n.T("cmd.php.test.flag.group")) - - parent.AddCommand(testCmd) -} - -var ( - fmtFix bool - fmtDiff bool -) - -func addPHPFmtCommand(parent *cobra.Command) { - fmtCmd := &cobra.Command{ - Use: "fmt [paths...]", - Short: i18n.T("cmd.php.fmt.short"), - Long: i18n.T("cmd.php.fmt.long"), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - if !IsPHPProject(cwd) { - return errors.New(i18n.T("cmd.php.error.not_php")) - } - - // Detect formatter - formatter, found := DetectFormatter(cwd) - if !found { - return errors.New(i18n.T("cmd.php.fmt.no_formatter")) - } - - var msg string - if fmtFix { - msg = i18n.T("cmd.php.fmt.formatting", map[string]interface{}{"Formatter": formatter}) - } else { - msg = i18n.ProgressSubject("check", "code style") - } - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.php")), msg) - - ctx := context.Background() - - opts := FormatOptions{ - Dir: cwd, - Fix: fmtFix, - Diff: fmtDiff, - Output: os.Stdout, - } - - // Get any additional paths from args - if len(args) > 0 { - opts.Paths = args - } - - if err := Format(ctx, opts); err != nil { - if fmtFix { - return cli.Err("%s: %w", i18n.T("cmd.php.error.fmt_failed"), err) - } - return cli.Err("%s: %w", i18n.T("cmd.php.error.fmt_issues"), err) - } - - if fmtFix { - cli.Print("\n%s %s\n", 
successStyle.Render(i18n.Label("done")), i18n.T("common.success.completed", map[string]any{"Action": "Code formatted"})) - } else { - cli.Print("\n%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("cmd.php.fmt.no_issues")) - } - - return nil - }, - } - - fmtCmd.Flags().BoolVar(&fmtFix, "fix", false, i18n.T("cmd.php.fmt.flag.fix")) - fmtCmd.Flags().BoolVar(&fmtDiff, "diff", false, i18n.T("common.flag.diff")) - - parent.AddCommand(fmtCmd) -} - -var ( - stanLevel int - stanMemory string -) - -func addPHPStanCommand(parent *cobra.Command) { - stanCmd := &cobra.Command{ - Use: "stan [paths...]", - Short: i18n.T("cmd.php.analyse.short"), - Long: i18n.T("cmd.php.analyse.long"), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - if !IsPHPProject(cwd) { - return errors.New(i18n.T("cmd.php.error.not_php")) - } - - // Detect analyser - _, found := DetectAnalyser(cwd) - if !found { - return errors.New(i18n.T("cmd.php.analyse.no_analyser")) - } - - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.php")), i18n.ProgressSubject("run", "static analysis")) - - ctx := context.Background() - - opts := AnalyseOptions{ - Dir: cwd, - Level: stanLevel, - Memory: stanMemory, - Output: os.Stdout, - } - - // Get any additional paths from args - if len(args) > 0 { - opts.Paths = args - } - - if err := Analyse(ctx, opts); err != nil { - return cli.Err("%s: %w", i18n.T("cmd.php.error.analysis_issues"), err) - } - - cli.Print("\n%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("common.result.no_issues")) - return nil - }, - } - - stanCmd.Flags().IntVar(&stanLevel, "level", 0, i18n.T("cmd.php.analyse.flag.level")) - stanCmd.Flags().StringVar(&stanMemory, "memory", "", i18n.T("cmd.php.analyse.flag.memory")) - - parent.AddCommand(stanCmd) -} - -// ============================================================================= -// New QA 
Commands -// ============================================================================= - -var ( - psalmLevel int - psalmFix bool - psalmBaseline bool - psalmShowInfo bool -) - -func addPHPPsalmCommand(parent *cobra.Command) { - psalmCmd := &cobra.Command{ - Use: "psalm", - Short: i18n.T("cmd.php.psalm.short"), - Long: i18n.T("cmd.php.psalm.long"), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - if !IsPHPProject(cwd) { - return errors.New(i18n.T("cmd.php.error.not_php")) - } - - // Check if Psalm is available - _, found := DetectPsalm(cwd) - if !found { - cli.Print("%s %s\n\n", errorStyle.Render(i18n.Label("error")), i18n.T("cmd.php.psalm.not_found")) - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("install")), i18n.T("cmd.php.psalm.install")) - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.setup")), i18n.T("cmd.php.psalm.setup")) - return errors.New(i18n.T("cmd.php.error.psalm_not_installed")) - } - - var msg string - if psalmFix { - msg = i18n.T("cmd.php.psalm.analysing_fixing") - } else { - msg = i18n.T("cmd.php.psalm.analysing") - } - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.psalm")), msg) - - ctx := context.Background() - - opts := PsalmOptions{ - Dir: cwd, - Level: psalmLevel, - Fix: psalmFix, - Baseline: psalmBaseline, - ShowInfo: psalmShowInfo, - Output: os.Stdout, - } - - if err := RunPsalm(ctx, opts); err != nil { - return cli.Err("%s: %w", i18n.T("cmd.php.error.psalm_issues"), err) - } - - cli.Print("\n%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("common.result.no_issues")) - return nil - }, - } - - psalmCmd.Flags().IntVar(&psalmLevel, "level", 0, i18n.T("cmd.php.psalm.flag.level")) - psalmCmd.Flags().BoolVar(&psalmFix, "fix", false, i18n.T("common.flag.fix")) - psalmCmd.Flags().BoolVar(&psalmBaseline, "baseline", false, i18n.T("cmd.php.psalm.flag.baseline")) - 
psalmCmd.Flags().BoolVar(&psalmShowInfo, "show-info", false, i18n.T("cmd.php.psalm.flag.show_info")) - - parent.AddCommand(psalmCmd) -} - -var ( - auditJSONOutput bool - auditFix bool -) - -func addPHPAuditCommand(parent *cobra.Command) { - auditCmd := &cobra.Command{ - Use: "audit", - Short: i18n.T("cmd.php.audit.short"), - Long: i18n.T("cmd.php.audit.long"), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - if !IsPHPProject(cwd) { - return errors.New(i18n.T("cmd.php.error.not_php")) - } - - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.audit")), i18n.T("cmd.php.audit.scanning")) - - ctx := context.Background() - - results, err := RunAudit(ctx, AuditOptions{ - Dir: cwd, - JSON: auditJSONOutput, - Fix: auditFix, - Output: os.Stdout, - }) - if err != nil { - return cli.Err("%s: %w", i18n.T("cmd.php.error.audit_failed"), err) - } - - // Print results - totalVulns := 0 - hasErrors := false - - for _, result := range results { - icon := successStyle.Render("✓") - status := successStyle.Render(i18n.T("cmd.php.audit.secure")) - - if result.Error != nil { - icon = errorStyle.Render("✗") - status = errorStyle.Render(i18n.T("cmd.php.audit.error")) - hasErrors = true - } else if result.Vulnerabilities > 0 { - icon = errorStyle.Render("✗") - status = errorStyle.Render(i18n.T("cmd.php.audit.vulnerabilities", map[string]interface{}{"Count": result.Vulnerabilities})) - totalVulns += result.Vulnerabilities - } - - cli.Print(" %s %s %s\n", icon, dimStyle.Render(result.Tool+":"), status) - - // Show advisories - for _, adv := range result.Advisories { - severity := adv.Severity - if severity == "" { - severity = "unknown" - } - sevStyle := getSeverityStyle(severity) - cli.Print(" %s %s\n", sevStyle.Render("["+severity+"]"), adv.Package) - if adv.Title != "" { - cli.Print(" %s\n", dimStyle.Render(adv.Title)) - } - } - } - - cli.Blank() 
- - if totalVulns > 0 { - cli.Print("%s %s\n", errorStyle.Render(i18n.Label("warning")), i18n.T("cmd.php.audit.found_vulns", map[string]interface{}{"Count": totalVulns})) - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("fix")), i18n.T("common.hint.fix_deps")) - return errors.New(i18n.T("cmd.php.error.vulns_found")) - } - - if hasErrors { - return errors.New(i18n.T("cmd.php.audit.completed_errors")) - } - - cli.Print("%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("cmd.php.audit.all_secure")) - return nil - }, - } - - auditCmd.Flags().BoolVar(&auditJSONOutput, "json", false, i18n.T("common.flag.json")) - auditCmd.Flags().BoolVar(&auditFix, "fix", false, i18n.T("cmd.php.audit.flag.fix")) - - parent.AddCommand(auditCmd) -} - -var ( - securitySeverity string - securityJSONOutput bool - securitySarif bool - securityURL string -) - -func addPHPSecurityCommand(parent *cobra.Command) { - securityCmd := &cobra.Command{ - Use: "security", - Short: i18n.T("cmd.php.security.short"), - Long: i18n.T("cmd.php.security.long"), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - if !IsPHPProject(cwd) { - return errors.New(i18n.T("cmd.php.error.not_php")) - } - - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.security")), i18n.ProgressSubject("run", "security checks")) - - ctx := context.Background() - - result, err := RunSecurityChecks(ctx, SecurityOptions{ - Dir: cwd, - Severity: securitySeverity, - JSON: securityJSONOutput, - SARIF: securitySarif, - URL: securityURL, - Output: os.Stdout, - }) - if err != nil { - return cli.Err("%s: %w", i18n.T("cmd.php.error.security_failed"), err) - } - - // Print results by category - currentCategory := "" - for _, check := range result.Checks { - category := strings.Split(check.ID, "_")[0] - if category != currentCategory { - if currentCategory != "" { - cli.Blank() - } - 
currentCategory = category - cli.Print(" %s\n", dimStyle.Render(strings.ToUpper(category)+i18n.T("cmd.php.security.checks_suffix"))) - } - - icon := successStyle.Render("✓") - if !check.Passed { - icon = getSeverityStyle(check.Severity).Render("✗") - } - - cli.Print(" %s %s\n", icon, check.Name) - if !check.Passed && check.Message != "" { - cli.Print(" %s\n", dimStyle.Render(check.Message)) - if check.Fix != "" { - cli.Print(" %s %s\n", dimStyle.Render(i18n.Label("fix")), check.Fix) - } - } - } - - cli.Blank() - - // Print summary - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("summary")), i18n.T("cmd.php.security.summary")) - cli.Print(" %s %d/%d\n", dimStyle.Render(i18n.T("cmd.php.security.passed")), result.Summary.Passed, result.Summary.Total) - - if result.Summary.Critical > 0 { - cli.Print(" %s %d\n", phpSecurityCriticalStyle.Render(i18n.T("cmd.php.security.critical")), result.Summary.Critical) - } - if result.Summary.High > 0 { - cli.Print(" %s %d\n", phpSecurityHighStyle.Render(i18n.T("cmd.php.security.high")), result.Summary.High) - } - if result.Summary.Medium > 0 { - cli.Print(" %s %d\n", phpSecurityMediumStyle.Render(i18n.T("cmd.php.security.medium")), result.Summary.Medium) - } - if result.Summary.Low > 0 { - cli.Print(" %s %d\n", phpSecurityLowStyle.Render(i18n.T("cmd.php.security.low")), result.Summary.Low) - } - - if result.Summary.Critical > 0 || result.Summary.High > 0 { - return errors.New(i18n.T("cmd.php.error.critical_high_issues")) - } - - return nil - }, - } - - securityCmd.Flags().StringVar(&securitySeverity, "severity", "", i18n.T("cmd.php.security.flag.severity")) - securityCmd.Flags().BoolVar(&securityJSONOutput, "json", false, i18n.T("common.flag.json")) - securityCmd.Flags().BoolVar(&securitySarif, "sarif", false, i18n.T("cmd.php.security.flag.sarif")) - securityCmd.Flags().StringVar(&securityURL, "url", "", i18n.T("cmd.php.security.flag.url")) - - parent.AddCommand(securityCmd) -} - -var ( - qaQuick bool - qaFull bool - qaFix bool -) 
- -func addPHPQACommand(parent *cobra.Command) { - qaCmd := &cobra.Command{ - Use: "qa", - Short: i18n.T("cmd.php.qa.short"), - Long: i18n.T("cmd.php.qa.long"), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - if !IsPHPProject(cwd) { - return errors.New(i18n.T("cmd.php.error.not_php")) - } - - // Determine stages - opts := QAOptions{ - Dir: cwd, - Quick: qaQuick, - Full: qaFull, - Fix: qaFix, - } - stages := GetQAStages(opts) - - // Print header - cli.Print("%s %s\n\n", dimStyle.Render(i18n.Label("qa")), i18n.ProgressSubject("run", "QA pipeline")) - - ctx := context.Background() - - // Create QA runner using pkg/process - runner, err := NewQARunner(cwd, qaFix) - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.create", "QA runner"), err) - } - - // Run all checks with dependency ordering - result, err := runner.Run(ctx, stages) - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.run", "QA checks"), err) - } - - // Display results by stage - currentStage := "" - for _, checkResult := range result.Results { - // Determine stage for this check - stage := getCheckStage(checkResult.Name, stages, cwd) - if stage != currentStage { - if currentStage != "" { - cli.Blank() - } - currentStage = stage - cli.Print("%s\n", phpQAStageStyle.Render("── "+strings.ToUpper(stage)+" ──")) - } - - icon := phpQAPassedStyle.Render("✓") - status := phpQAPassedStyle.Render(i18n.T("i18n.done.pass")) - if checkResult.Skipped { - icon = dimStyle.Render("-") - status = dimStyle.Render(i18n.T("i18n.done.skip")) - } else if !checkResult.Passed { - icon = phpQAFailedStyle.Render("✗") - status = phpQAFailedStyle.Render(i18n.T("i18n.done.fail")) - } - - cli.Print(" %s %s %s %s\n", icon, checkResult.Name, status, dimStyle.Render(checkResult.Duration)) - } - cli.Blank() - - // Print summary - if result.Passed { - cli.Print("%s %s\n", 
phpQAPassedStyle.Render("QA PASSED:"), i18n.T("i18n.count.check", result.PassedCount)+" "+i18n.T("i18n.done.pass")) - cli.Print("%s %s\n", dimStyle.Render(i18n.T("i18n.label.duration")), result.Duration) - return nil - } - - cli.Print("%s %s\n\n", phpQAFailedStyle.Render("QA FAILED:"), i18n.T("i18n.count.check", result.PassedCount)+"/"+cli.Sprint(len(result.Results))+" "+i18n.T("i18n.done.pass")) - - // Show what needs fixing - cli.Print("%s\n", dimStyle.Render(i18n.T("i18n.label.fix"))) - for _, checkResult := range result.Results { - if checkResult.Passed || checkResult.Skipped { - continue - } - fixCmd := getQAFixCommand(checkResult.Name, qaFix) - issue := checkResult.GetIssueMessage() - if issue == "" { - issue = "issues found" - } - cli.Print(" %s %s\n", phpQAFailedStyle.Render("*"), checkResult.Name+": "+issue) - if fixCmd != "" { - cli.Print(" %s %s\n", dimStyle.Render("->"), fixCmd) - } - } - - return cli.Err("%s", i18n.T("i18n.fail.run", "QA pipeline")) - }, - } - - qaCmd.Flags().BoolVar(&qaQuick, "quick", false, "Run quick checks only (audit, fmt, stan)") - qaCmd.Flags().BoolVar(&qaFull, "full", false, "Run all stages including slow checks") - qaCmd.Flags().BoolVar(&qaFix, "fix", false, "Auto-fix issues where possible") - - parent.AddCommand(qaCmd) -} - -// getCheckStage determines which stage a check belongs to. 
-func getCheckStage(checkName string, stages []QAStage, dir string) string { - for _, stage := range stages { - checks := GetQAChecks(dir, stage) - for _, c := range checks { - if c == checkName { - return string(stage) - } - } - } - return "unknown" -} - -func getQAFixCommand(checkName string, fixEnabled bool) string { - switch checkName { - case "audit": - return i18n.T("i18n.progress.update", "dependencies") - case "fmt": - if fixEnabled { - return "" - } - return "core php fmt --fix" - case "stan": - return i18n.T("i18n.progress.fix", "PHPStan errors") - case "psalm": - return i18n.T("i18n.progress.fix", "Psalm errors") - case "test": - return i18n.T("i18n.progress.fix", i18n.T("i18n.done.fail")+" tests") - case "rector": - if fixEnabled { - return "" - } - return "core php rector --fix" - case "infection": - return i18n.T("i18n.progress.improve", "test coverage") - } - return "" -} - -var ( - rectorFix bool - rectorDiff bool - rectorClearCache bool -) - -func addPHPRectorCommand(parent *cobra.Command) { - rectorCmd := &cobra.Command{ - Use: "rector", - Short: i18n.T("cmd.php.rector.short"), - Long: i18n.T("cmd.php.rector.long"), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - if !IsPHPProject(cwd) { - return errors.New(i18n.T("cmd.php.error.not_php")) - } - - // Check if Rector is available - if !DetectRector(cwd) { - cli.Print("%s %s\n\n", errorStyle.Render(i18n.Label("error")), i18n.T("cmd.php.rector.not_found")) - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("install")), i18n.T("cmd.php.rector.install")) - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.setup")), i18n.T("cmd.php.rector.setup")) - return errors.New(i18n.T("cmd.php.error.rector_not_installed")) - } - - var msg string - if rectorFix { - msg = i18n.T("cmd.php.rector.refactoring") - } else { - msg = i18n.T("cmd.php.rector.analysing") - } - 
cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.rector")), msg) - - ctx := context.Background() - - opts := RectorOptions{ - Dir: cwd, - Fix: rectorFix, - Diff: rectorDiff, - ClearCache: rectorClearCache, - Output: os.Stdout, - } - - if err := RunRector(ctx, opts); err != nil { - if rectorFix { - return cli.Err("%s: %w", i18n.T("cmd.php.error.rector_failed"), err) - } - // Dry-run returns non-zero if changes would be made - cli.Print("\n%s %s\n", phpQAWarningStyle.Render(i18n.T("cmd.php.label.info")), i18n.T("cmd.php.rector.changes_suggested")) - return nil - } - - if rectorFix { - cli.Print("\n%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("common.success.completed", map[string]any{"Action": "Code refactored"})) - } else { - cli.Print("\n%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("cmd.php.rector.no_changes")) - } - return nil - }, - } - - rectorCmd.Flags().BoolVar(&rectorFix, "fix", false, i18n.T("cmd.php.rector.flag.fix")) - rectorCmd.Flags().BoolVar(&rectorDiff, "diff", false, i18n.T("cmd.php.rector.flag.diff")) - rectorCmd.Flags().BoolVar(&rectorClearCache, "clear-cache", false, i18n.T("cmd.php.rector.flag.clear_cache")) - - parent.AddCommand(rectorCmd) -} - -var ( - infectionMinMSI int - infectionMinCoveredMSI int - infectionThreads int - infectionFilter string - infectionOnlyCovered bool -) - -func addPHPInfectionCommand(parent *cobra.Command) { - infectionCmd := &cobra.Command{ - Use: "infection", - Short: i18n.T("cmd.php.infection.short"), - Long: i18n.T("cmd.php.infection.long"), - RunE: func(cmd *cobra.Command, args []string) error { - cwd, err := os.Getwd() - if err != nil { - return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - if !IsPHPProject(cwd) { - return errors.New(i18n.T("cmd.php.error.not_php")) - } - - // Check if Infection is available - if !DetectInfection(cwd) { - cli.Print("%s %s\n\n", errorStyle.Render(i18n.Label("error")), i18n.T("cmd.php.infection.not_found")) - 
cli.Print("%s %s\n", dimStyle.Render(i18n.Label("install")), i18n.T("cmd.php.infection.install")) - return errors.New(i18n.T("cmd.php.error.infection_not_installed")) - } - - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.php.label.infection")), i18n.ProgressSubject("run", "mutation testing")) - cli.Print("%s %s\n\n", dimStyle.Render(i18n.T("cmd.php.label.info")), i18n.T("cmd.php.infection.note")) - - ctx := context.Background() - - opts := InfectionOptions{ - Dir: cwd, - MinMSI: infectionMinMSI, - MinCoveredMSI: infectionMinCoveredMSI, - Threads: infectionThreads, - Filter: infectionFilter, - OnlyCovered: infectionOnlyCovered, - Output: os.Stdout, - } - - if err := RunInfection(ctx, opts); err != nil { - return cli.Err("%s: %w", i18n.T("cmd.php.error.infection_failed"), err) - } - - cli.Print("\n%s %s\n", successStyle.Render(i18n.Label("done")), i18n.T("cmd.php.infection.complete")) - return nil - }, - } - - infectionCmd.Flags().IntVar(&infectionMinMSI, "min-msi", 0, i18n.T("cmd.php.infection.flag.min_msi")) - infectionCmd.Flags().IntVar(&infectionMinCoveredMSI, "min-covered-msi", 0, i18n.T("cmd.php.infection.flag.min_covered_msi")) - infectionCmd.Flags().IntVar(&infectionThreads, "threads", 0, i18n.T("cmd.php.infection.flag.threads")) - infectionCmd.Flags().StringVar(&infectionFilter, "filter", "", i18n.T("cmd.php.infection.flag.filter")) - infectionCmd.Flags().BoolVar(&infectionOnlyCovered, "only-covered", false, i18n.T("cmd.php.infection.flag.only_covered")) - - parent.AddCommand(infectionCmd) -} - -func getSeverityStyle(severity string) *cli.AnsiStyle { - switch strings.ToLower(severity) { - case "critical": - return phpSecurityCriticalStyle - case "high": - return phpSecurityHighStyle - case "medium": - return phpSecurityMediumStyle - case "low": - return phpSecurityLowStyle - default: - return dimStyle - } -} diff --git a/pkg/php/container.go b/pkg/php/container.go deleted file mode 100644 index 37a1d73..0000000 --- a/pkg/php/container.go +++ /dev/null @@ 
-1,449 +0,0 @@ -package php - -import ( - "context" - "io" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/cli" -) - -// DockerBuildOptions configures Docker image building for PHP projects. -type DockerBuildOptions struct { - // ProjectDir is the path to the PHP/Laravel project. - ProjectDir string - - // ImageName is the name for the Docker image. - ImageName string - - // Tag is the image tag (default: "latest"). - Tag string - - // Platform specifies the target platform (e.g., "linux/amd64", "linux/arm64"). - Platform string - - // Dockerfile is the path to a custom Dockerfile. - // If empty, one will be auto-generated for FrankenPHP. - Dockerfile string - - // NoBuildCache disables Docker build cache. - NoBuildCache bool - - // BuildArgs are additional build arguments. - BuildArgs map[string]string - - // Output is the writer for build output (default: os.Stdout). - Output io.Writer -} - -// LinuxKitBuildOptions configures LinuxKit image building for PHP projects. -type LinuxKitBuildOptions struct { - // ProjectDir is the path to the PHP/Laravel project. - ProjectDir string - - // OutputPath is the path for the output image. - OutputPath string - - // Format is the output format: "iso", "qcow2", "raw", "vmdk". - Format string - - // Template is the LinuxKit template name (default: "server-php"). - Template string - - // Variables are template variables to apply. - Variables map[string]string - - // Output is the writer for build output (default: os.Stdout). - Output io.Writer -} - -// ServeOptions configures running a production PHP container. -type ServeOptions struct { - // ImageName is the Docker image to run. - ImageName string - - // Tag is the image tag (default: "latest"). - Tag string - - // ContainerName is the name for the container. - ContainerName string - - // Port is the host port to bind (default: 80). - Port int - - // HTTPSPort is the host HTTPS port to bind (default: 443). 
- HTTPSPort int - - // Detach runs the container in detached mode. - Detach bool - - // EnvFile is the path to an environment file. - EnvFile string - - // Volumes maps host paths to container paths. - Volumes map[string]string - - // Output is the writer for output (default: os.Stdout). - Output io.Writer -} - -// BuildDocker builds a Docker image for the PHP project. -func BuildDocker(ctx context.Context, opts DockerBuildOptions) error { - if opts.ProjectDir == "" { - cwd, err := os.Getwd() - if err != nil { - return cli.WrapVerb(err, "get", "working directory") - } - opts.ProjectDir = cwd - } - - // Validate project directory - if !IsPHPProject(opts.ProjectDir) { - return cli.Err("not a PHP project: %s (missing composer.json)", opts.ProjectDir) - } - - // Set defaults - if opts.ImageName == "" { - opts.ImageName = filepath.Base(opts.ProjectDir) - } - if opts.Tag == "" { - opts.Tag = "latest" - } - if opts.Output == nil { - opts.Output = os.Stdout - } - - // Determine Dockerfile path - dockerfilePath := opts.Dockerfile - var tempDockerfile string - - if dockerfilePath == "" { - // Generate Dockerfile - content, err := GenerateDockerfile(opts.ProjectDir) - if err != nil { - return cli.WrapVerb(err, "generate", "Dockerfile") - } - - // Write to temporary file - tempDockerfile = filepath.Join(opts.ProjectDir, "Dockerfile.core-generated") - if err := os.WriteFile(tempDockerfile, []byte(content), 0644); err != nil { - return cli.WrapVerb(err, "write", "Dockerfile") - } - defer os.Remove(tempDockerfile) - - dockerfilePath = tempDockerfile - } - - // Build Docker image - imageRef := cli.Sprintf("%s:%s", opts.ImageName, opts.Tag) - - args := []string{"build", "-t", imageRef, "-f", dockerfilePath} - - if opts.Platform != "" { - args = append(args, "--platform", opts.Platform) - } - - if opts.NoBuildCache { - args = append(args, "--no-cache") - } - - for key, value := range opts.BuildArgs { - args = append(args, "--build-arg", cli.Sprintf("%s=%s", key, value)) - } - - args 
= append(args, opts.ProjectDir) - - cmd := exec.CommandContext(ctx, "docker", args...) - cmd.Dir = opts.ProjectDir - cmd.Stdout = opts.Output - cmd.Stderr = opts.Output - - if err := cmd.Run(); err != nil { - return cli.Wrap(err, "docker build failed") - } - - return nil -} - -// BuildLinuxKit builds a LinuxKit image for the PHP project. -func BuildLinuxKit(ctx context.Context, opts LinuxKitBuildOptions) error { - if opts.ProjectDir == "" { - cwd, err := os.Getwd() - if err != nil { - return cli.WrapVerb(err, "get", "working directory") - } - opts.ProjectDir = cwd - } - - // Validate project directory - if !IsPHPProject(opts.ProjectDir) { - return cli.Err("not a PHP project: %s (missing composer.json)", opts.ProjectDir) - } - - // Set defaults - if opts.Template == "" { - opts.Template = "server-php" - } - if opts.Format == "" { - opts.Format = "qcow2" - } - if opts.OutputPath == "" { - opts.OutputPath = filepath.Join(opts.ProjectDir, "dist", filepath.Base(opts.ProjectDir)) - } - if opts.Output == nil { - opts.Output = os.Stdout - } - - // Ensure output directory exists - outputDir := filepath.Dir(opts.OutputPath) - if err := os.MkdirAll(outputDir, 0755); err != nil { - return cli.WrapVerb(err, "create", "output directory") - } - - // Find linuxkit binary - linuxkitPath, err := lookupLinuxKit() - if err != nil { - return err - } - - // Get template content - templateContent, err := getLinuxKitTemplate(opts.Template) - if err != nil { - return cli.WrapVerb(err, "get", "template") - } - - // Apply variables - if opts.Variables == nil { - opts.Variables = make(map[string]string) - } - // Add project-specific variables - opts.Variables["PROJECT_DIR"] = opts.ProjectDir - opts.Variables["PROJECT_NAME"] = filepath.Base(opts.ProjectDir) - - content, err := applyTemplateVariables(templateContent, opts.Variables) - if err != nil { - return cli.WrapVerb(err, "apply", "template variables") - } - - // Write template to temp file - tempYAML := filepath.Join(opts.ProjectDir, 
".core-linuxkit.yml") - if err := os.WriteFile(tempYAML, []byte(content), 0644); err != nil { - return cli.WrapVerb(err, "write", "template") - } - defer os.Remove(tempYAML) - - // Build LinuxKit image - args := []string{ - "build", - "--format", opts.Format, - "--name", opts.OutputPath, - tempYAML, - } - - cmd := exec.CommandContext(ctx, linuxkitPath, args...) - cmd.Dir = opts.ProjectDir - cmd.Stdout = opts.Output - cmd.Stderr = opts.Output - - if err := cmd.Run(); err != nil { - return cli.Wrap(err, "linuxkit build failed") - } - - return nil -} - -// ServeProduction runs a production PHP container. -func ServeProduction(ctx context.Context, opts ServeOptions) error { - if opts.ImageName == "" { - return cli.Err("image name is required") - } - - // Set defaults - if opts.Tag == "" { - opts.Tag = "latest" - } - if opts.Port == 0 { - opts.Port = 80 - } - if opts.HTTPSPort == 0 { - opts.HTTPSPort = 443 - } - if opts.Output == nil { - opts.Output = os.Stdout - } - - imageRef := cli.Sprintf("%s:%s", opts.ImageName, opts.Tag) - - args := []string{"run"} - - if opts.Detach { - args = append(args, "-d") - } else { - args = append(args, "--rm") - } - - if opts.ContainerName != "" { - args = append(args, "--name", opts.ContainerName) - } - - // Port mappings - args = append(args, "-p", cli.Sprintf("%d:80", opts.Port)) - args = append(args, "-p", cli.Sprintf("%d:443", opts.HTTPSPort)) - - // Environment file - if opts.EnvFile != "" { - args = append(args, "--env-file", opts.EnvFile) - } - - // Volume mounts - for hostPath, containerPath := range opts.Volumes { - args = append(args, "-v", cli.Sprintf("%s:%s", hostPath, containerPath)) - } - - args = append(args, imageRef) - - cmd := exec.CommandContext(ctx, "docker", args...) 
- cmd.Stdout = opts.Output - cmd.Stderr = opts.Output - - if opts.Detach { - output, err := cmd.Output() - if err != nil { - return cli.WrapVerb(err, "start", "container") - } - containerID := strings.TrimSpace(string(output)) - cli.Print("Container started: %s\n", containerID[:12]) - return nil - } - - return cmd.Run() -} - -// Shell opens a shell in a running container. -func Shell(ctx context.Context, containerID string) error { - if containerID == "" { - return cli.Err("container ID is required") - } - - // Resolve partial container ID - fullID, err := resolveDockerContainerID(ctx, containerID) - if err != nil { - return err - } - - cmd := exec.CommandContext(ctx, "docker", "exec", "-it", fullID, "/bin/sh") - cmd.Stdin = os.Stdin - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - return cmd.Run() -} - -// IsPHPProject checks if the given directory is a PHP project. -func IsPHPProject(dir string) bool { - composerPath := filepath.Join(dir, "composer.json") - _, err := os.Stat(composerPath) - return err == nil -} - -// commonLinuxKitPaths defines default search locations for linuxkit. -var commonLinuxKitPaths = []string{ - "/usr/local/bin/linuxkit", - "/opt/homebrew/bin/linuxkit", -} - -// lookupLinuxKit finds the linuxkit binary. -func lookupLinuxKit() (string, error) { - // Check PATH first - if path, err := exec.LookPath("linuxkit"); err == nil { - return path, nil - } - - for _, p := range commonLinuxKitPaths { - if _, err := os.Stat(p); err == nil { - return p, nil - } - } - - return "", cli.Err("linuxkit not found. Install with: brew install linuxkit (macOS) or see https://github.com/linuxkit/linuxkit") -} - -// getLinuxKitTemplate retrieves a LinuxKit template by name. 
-func getLinuxKitTemplate(name string) (string, error) { - // Default server-php template for PHP projects - if name == "server-php" { - return defaultServerPHPTemplate, nil - } - - // Try to load from container package templates - // This would integrate with github.com/host-uk/core/pkg/container - return "", cli.Err("template not found: %s", name) -} - -// applyTemplateVariables applies variable substitution to template content. -func applyTemplateVariables(content string, vars map[string]string) (string, error) { - result := content - for key, value := range vars { - placeholder := "${" + key + "}" - result = strings.ReplaceAll(result, placeholder, value) - } - return result, nil -} - -// resolveDockerContainerID resolves a partial container ID to a full ID. -func resolveDockerContainerID(ctx context.Context, partialID string) (string, error) { - cmd := exec.CommandContext(ctx, "docker", "ps", "-a", "--no-trunc", "--format", "{{.ID}}") - output, err := cmd.Output() - if err != nil { - return "", cli.WrapVerb(err, "list", "containers") - } - - lines := strings.Split(strings.TrimSpace(string(output)), "\n") - var matches []string - - for _, line := range lines { - if strings.HasPrefix(line, partialID) { - matches = append(matches, line) - } - } - - switch len(matches) { - case 0: - return "", cli.Err("no container found matching: %s", partialID) - case 1: - return matches[0], nil - default: - return "", cli.Err("multiple containers match '%s', be more specific", partialID) - } -} - -// defaultServerPHPTemplate is the default LinuxKit template for PHP servers. 
-const defaultServerPHPTemplate = `# LinuxKit configuration for PHP/FrankenPHP server -kernel: - image: linuxkit/kernel:6.6.13 - cmdline: "console=tty0 console=ttyS0" -init: - - linuxkit/init:v1.0.1 - - linuxkit/runc:v1.0.1 - - linuxkit/containerd:v1.0.1 -onboot: - - name: sysctl - image: linuxkit/sysctl:v1.0.1 - - name: dhcpcd - image: linuxkit/dhcpcd:v1.0.1 - command: ["/sbin/dhcpcd", "--nobackground", "-f", "/dhcpcd.conf"] -services: - - name: getty - image: linuxkit/getty:v1.0.1 - env: - - INSECURE=true - - name: sshd - image: linuxkit/sshd:v1.0.1 -files: - - path: etc/ssh/authorized_keys - contents: | - ${SSH_KEY:-} -` diff --git a/pkg/php/container_test.go b/pkg/php/container_test.go deleted file mode 100644 index f1a2c5c..0000000 --- a/pkg/php/container_test.go +++ /dev/null @@ -1,382 +0,0 @@ -package php - -import ( - "os" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestDockerBuildOptions_Good(t *testing.T) { - t.Run("all fields accessible", func(t *testing.T) { - opts := DockerBuildOptions{ - ProjectDir: "/project", - ImageName: "myapp", - Tag: "v1.0.0", - Platform: "linux/amd64", - Dockerfile: "/path/to/Dockerfile", - NoBuildCache: true, - BuildArgs: map[string]string{"ARG1": "value1"}, - Output: os.Stdout, - } - - assert.Equal(t, "/project", opts.ProjectDir) - assert.Equal(t, "myapp", opts.ImageName) - assert.Equal(t, "v1.0.0", opts.Tag) - assert.Equal(t, "linux/amd64", opts.Platform) - assert.Equal(t, "/path/to/Dockerfile", opts.Dockerfile) - assert.True(t, opts.NoBuildCache) - assert.Equal(t, "value1", opts.BuildArgs["ARG1"]) - assert.NotNil(t, opts.Output) - }) -} - -func TestLinuxKitBuildOptions_Good(t *testing.T) { - t.Run("all fields accessible", func(t *testing.T) { - opts := LinuxKitBuildOptions{ - ProjectDir: "/project", - OutputPath: "/output/image.qcow2", - Format: "qcow2", - Template: "server-php", - Variables: map[string]string{"VAR1": "value1"}, - Output: 
os.Stdout, - } - - assert.Equal(t, "/project", opts.ProjectDir) - assert.Equal(t, "/output/image.qcow2", opts.OutputPath) - assert.Equal(t, "qcow2", opts.Format) - assert.Equal(t, "server-php", opts.Template) - assert.Equal(t, "value1", opts.Variables["VAR1"]) - assert.NotNil(t, opts.Output) - }) -} - -func TestServeOptions_Good(t *testing.T) { - t.Run("all fields accessible", func(t *testing.T) { - opts := ServeOptions{ - ImageName: "myapp", - Tag: "latest", - ContainerName: "myapp-container", - Port: 8080, - HTTPSPort: 8443, - Detach: true, - EnvFile: "/path/to/.env", - Volumes: map[string]string{"/host": "/container"}, - Output: os.Stdout, - } - - assert.Equal(t, "myapp", opts.ImageName) - assert.Equal(t, "latest", opts.Tag) - assert.Equal(t, "myapp-container", opts.ContainerName) - assert.Equal(t, 8080, opts.Port) - assert.Equal(t, 8443, opts.HTTPSPort) - assert.True(t, opts.Detach) - assert.Equal(t, "/path/to/.env", opts.EnvFile) - assert.Equal(t, "/container", opts.Volumes["/host"]) - assert.NotNil(t, opts.Output) - }) -} - -func TestIsPHPProject_Container_Good(t *testing.T) { - t.Run("returns true with composer.json", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(`{}`), 0644) - require.NoError(t, err) - - assert.True(t, IsPHPProject(dir)) - }) -} - -func TestIsPHPProject_Container_Bad(t *testing.T) { - t.Run("returns false without composer.json", func(t *testing.T) { - dir := t.TempDir() - assert.False(t, IsPHPProject(dir)) - }) - - t.Run("returns false for non-existent directory", func(t *testing.T) { - assert.False(t, IsPHPProject("/non/existent/path")) - }) -} - -func TestLookupLinuxKit_Bad(t *testing.T) { - t.Run("returns error when linuxkit not found", func(t *testing.T) { - // Save original PATH and paths - origPath := os.Getenv("PATH") - origCommonPaths := commonLinuxKitPaths - defer func() { - os.Setenv("PATH", origPath) - commonLinuxKitPaths = origCommonPaths - }() - - // Set PATH to 
empty and clear common paths - os.Setenv("PATH", "") - commonLinuxKitPaths = []string{} - - _, err := lookupLinuxKit() - if assert.Error(t, err) { - assert.Contains(t, err.Error(), "linuxkit not found") - } - }) -} - -func TestGetLinuxKitTemplate_Good(t *testing.T) { - t.Run("returns server-php template", func(t *testing.T) { - content, err := getLinuxKitTemplate("server-php") - assert.NoError(t, err) - assert.Contains(t, content, "kernel:") - assert.Contains(t, content, "linuxkit/kernel") - }) -} - -func TestGetLinuxKitTemplate_Bad(t *testing.T) { - t.Run("returns error for unknown template", func(t *testing.T) { - _, err := getLinuxKitTemplate("unknown-template") - assert.Error(t, err) - assert.Contains(t, err.Error(), "template not found") - }) -} - -func TestApplyTemplateVariables_Good(t *testing.T) { - t.Run("replaces variables", func(t *testing.T) { - content := "Hello ${NAME}, welcome to ${PLACE}!" - vars := map[string]string{ - "NAME": "World", - "PLACE": "Earth", - } - - result, err := applyTemplateVariables(content, vars) - assert.NoError(t, err) - assert.Equal(t, "Hello World, welcome to Earth!", result) - }) - - t.Run("handles empty variables", func(t *testing.T) { - content := "No variables here" - vars := map[string]string{} - - result, err := applyTemplateVariables(content, vars) - assert.NoError(t, err) - assert.Equal(t, "No variables here", result) - }) - - t.Run("leaves unmatched placeholders", func(t *testing.T) { - content := "Hello ${NAME}, ${UNKNOWN} is unknown" - vars := map[string]string{ - "NAME": "World", - } - - result, err := applyTemplateVariables(content, vars) - assert.NoError(t, err) - assert.Contains(t, result, "Hello World") - assert.Contains(t, result, "${UNKNOWN}") - }) - - t.Run("handles multiple occurrences", func(t *testing.T) { - content := "${VAR} and ${VAR} again" - vars := map[string]string{ - "VAR": "value", - } - - result, err := applyTemplateVariables(content, vars) - assert.NoError(t, err) - assert.Equal(t, "value and 
value again", result) - }) -} - -func TestDefaultServerPHPTemplate_Good(t *testing.T) { - t.Run("template has required sections", func(t *testing.T) { - assert.Contains(t, defaultServerPHPTemplate, "kernel:") - assert.Contains(t, defaultServerPHPTemplate, "init:") - assert.Contains(t, defaultServerPHPTemplate, "services:") - assert.Contains(t, defaultServerPHPTemplate, "onboot:") - }) - - t.Run("template contains placeholders", func(t *testing.T) { - assert.Contains(t, defaultServerPHPTemplate, "${SSH_KEY:-}") - }) -} - -func TestBuildDocker_Bad(t *testing.T) { - t.Skip("requires Docker installed") - - t.Run("fails for non-PHP project", func(t *testing.T) { - dir := t.TempDir() - err := BuildDocker(nil, DockerBuildOptions{ProjectDir: dir}) - assert.Error(t, err) - assert.Contains(t, err.Error(), "not a PHP project") - }) -} - -func TestBuildLinuxKit_Bad(t *testing.T) { - t.Skip("requires linuxkit installed") - - t.Run("fails for non-PHP project", func(t *testing.T) { - dir := t.TempDir() - err := BuildLinuxKit(nil, LinuxKitBuildOptions{ProjectDir: dir}) - assert.Error(t, err) - assert.Contains(t, err.Error(), "not a PHP project") - }) -} - -func TestServeProduction_Bad(t *testing.T) { - t.Run("fails without image name", func(t *testing.T) { - err := ServeProduction(nil, ServeOptions{}) - assert.Error(t, err) - assert.Contains(t, err.Error(), "image name is required") - }) -} - -func TestShell_Bad(t *testing.T) { - t.Run("fails without container ID", func(t *testing.T) { - err := Shell(nil, "") - assert.Error(t, err) - assert.Contains(t, err.Error(), "container ID is required") - }) -} - -func TestResolveDockerContainerID_Bad(t *testing.T) { - t.Skip("requires Docker installed") -} - -func TestBuildDocker_DefaultOptions(t *testing.T) { - t.Run("sets defaults correctly", func(t *testing.T) { - // This tests the default logic without actually running Docker - opts := DockerBuildOptions{} - - // Verify default values would be set in BuildDocker - if opts.Tag == "" { - 
opts.Tag = "latest" - } - assert.Equal(t, "latest", opts.Tag) - - if opts.ImageName == "" { - opts.ImageName = filepath.Base("/project/myapp") - } - assert.Equal(t, "myapp", opts.ImageName) - }) -} - -func TestBuildLinuxKit_DefaultOptions(t *testing.T) { - t.Run("sets defaults correctly", func(t *testing.T) { - opts := LinuxKitBuildOptions{} - - // Verify default values would be set - if opts.Template == "" { - opts.Template = "server-php" - } - assert.Equal(t, "server-php", opts.Template) - - if opts.Format == "" { - opts.Format = "qcow2" - } - assert.Equal(t, "qcow2", opts.Format) - }) -} - -func TestServeProduction_DefaultOptions(t *testing.T) { - t.Run("sets defaults correctly", func(t *testing.T) { - opts := ServeOptions{ImageName: "myapp"} - - // Verify default values would be set - if opts.Tag == "" { - opts.Tag = "latest" - } - assert.Equal(t, "latest", opts.Tag) - - if opts.Port == 0 { - opts.Port = 80 - } - assert.Equal(t, 80, opts.Port) - - if opts.HTTPSPort == 0 { - opts.HTTPSPort = 443 - } - assert.Equal(t, 443, opts.HTTPSPort) - }) -} - -func TestLookupLinuxKit_Good(t *testing.T) { - t.Skip("requires linuxkit installed") - - t.Run("finds linuxkit in PATH", func(t *testing.T) { - path, err := lookupLinuxKit() - assert.NoError(t, err) - assert.NotEmpty(t, path) - }) -} - -func TestBuildDocker_WithCustomDockerfile(t *testing.T) { - t.Skip("requires Docker installed") - - t.Run("uses custom Dockerfile when provided", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(`{"name":"test"}`), 0644) - require.NoError(t, err) - - dockerfilePath := filepath.Join(dir, "Dockerfile.custom") - err = os.WriteFile(dockerfilePath, []byte("FROM alpine"), 0644) - require.NoError(t, err) - - opts := DockerBuildOptions{ - ProjectDir: dir, - Dockerfile: dockerfilePath, - } - - // The function would use the custom Dockerfile - assert.Equal(t, dockerfilePath, opts.Dockerfile) - }) -} - -func 
TestBuildDocker_GeneratesDockerfile(t *testing.T) { - t.Skip("requires Docker installed") - - t.Run("generates Dockerfile when not provided", func(t *testing.T) { - dir := t.TempDir() - - // Create valid PHP project - composerJSON := `{"name":"test","require":{"php":"^8.2","laravel/framework":"^11.0"}}` - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - opts := DockerBuildOptions{ - ProjectDir: dir, - // Dockerfile not specified - should be generated - } - - assert.Empty(t, opts.Dockerfile) - }) -} - -func TestServeProduction_BuildsCorrectArgs(t *testing.T) { - t.Run("builds correct docker run arguments", func(t *testing.T) { - opts := ServeOptions{ - ImageName: "myapp", - Tag: "v1.0.0", - ContainerName: "myapp-prod", - Port: 8080, - HTTPSPort: 8443, - Detach: true, - EnvFile: "/path/.env", - Volumes: map[string]string{ - "/host/storage": "/app/storage", - }, - } - - // Verify the expected image reference format - imageRef := opts.ImageName + ":" + opts.Tag - assert.Equal(t, "myapp:v1.0.0", imageRef) - - // Verify port format - portMapping := opts.Port - assert.Equal(t, 8080, portMapping) - }) -} - -func TestShell_Integration(t *testing.T) { - t.Skip("requires Docker with running container") -} - -func TestResolveDockerContainerID_Integration(t *testing.T) { - t.Skip("requires Docker with running containers") -} diff --git a/pkg/php/coolify.go b/pkg/php/coolify.go deleted file mode 100644 index fe2e59b..0000000 --- a/pkg/php/coolify.go +++ /dev/null @@ -1,355 +0,0 @@ -package php - -import ( - "bytes" - "context" - "encoding/json" - "io" - "net/http" - "os" - "path/filepath" - "strings" - "time" - - "github.com/host-uk/core/pkg/cli" -) - -// CoolifyClient is an HTTP client for the Coolify API. -type CoolifyClient struct { - BaseURL string - Token string - HTTPClient *http.Client -} - -// CoolifyConfig holds configuration loaded from environment. 
-type CoolifyConfig struct { - URL string - Token string - AppID string - StagingAppID string -} - -// CoolifyDeployment represents a deployment from the Coolify API. -type CoolifyDeployment struct { - ID string `json:"id"` - Status string `json:"status"` - CommitSHA string `json:"commit_sha,omitempty"` - CommitMsg string `json:"commit_message,omitempty"` - Branch string `json:"branch,omitempty"` - CreatedAt time.Time `json:"created_at"` - FinishedAt time.Time `json:"finished_at,omitempty"` - Log string `json:"log,omitempty"` - DeployedURL string `json:"deployed_url,omitempty"` -} - -// CoolifyApp represents an application from the Coolify API. -type CoolifyApp struct { - ID string `json:"id"` - Name string `json:"name"` - FQDN string `json:"fqdn,omitempty"` - Status string `json:"status,omitempty"` - Repository string `json:"repository,omitempty"` - Branch string `json:"branch,omitempty"` - Environment string `json:"environment,omitempty"` -} - -// NewCoolifyClient creates a new Coolify API client. -func NewCoolifyClient(baseURL, token string) *CoolifyClient { - // Ensure baseURL doesn't have trailing slash - baseURL = strings.TrimSuffix(baseURL, "/") - - return &CoolifyClient{ - BaseURL: baseURL, - Token: token, - HTTPClient: &http.Client{ - Timeout: 30 * time.Second, - }, - } -} - -// LoadCoolifyConfig loads Coolify configuration from .env file in the given directory. -func LoadCoolifyConfig(dir string) (*CoolifyConfig, error) { - envPath := filepath.Join(dir, ".env") - return LoadCoolifyConfigFromFile(envPath) -} - -// LoadCoolifyConfigFromFile loads Coolify configuration from a specific .env file. 
-func LoadCoolifyConfigFromFile(path string) (*CoolifyConfig, error) { - config := &CoolifyConfig{} - - // First try environment variables - config.URL = os.Getenv("COOLIFY_URL") - config.Token = os.Getenv("COOLIFY_TOKEN") - config.AppID = os.Getenv("COOLIFY_APP_ID") - config.StagingAppID = os.Getenv("COOLIFY_STAGING_APP_ID") - - // Then try .env file - file, err := os.Open(path) - if err != nil { - if os.IsNotExist(err) { - // No .env file, just use env vars - return validateCoolifyConfig(config) - } - return nil, cli.WrapVerb(err, "open", ".env file") - } - defer file.Close() - - content, err := io.ReadAll(file) - if err != nil { - return nil, cli.WrapVerb(err, "read", ".env file") - } - - // Parse .env file - lines := strings.Split(string(content), "\n") - for _, line := range lines { - line = strings.TrimSpace(line) - if line == "" || strings.HasPrefix(line, "#") { - continue - } - - parts := strings.SplitN(line, "=", 2) - if len(parts) != 2 { - continue - } - - key := strings.TrimSpace(parts[0]) - value := strings.TrimSpace(parts[1]) - // Remove quotes if present - value = strings.Trim(value, `"'`) - - // Only override if not already set from env - switch key { - case "COOLIFY_URL": - if config.URL == "" { - config.URL = value - } - case "COOLIFY_TOKEN": - if config.Token == "" { - config.Token = value - } - case "COOLIFY_APP_ID": - if config.AppID == "" { - config.AppID = value - } - case "COOLIFY_STAGING_APP_ID": - if config.StagingAppID == "" { - config.StagingAppID = value - } - } - } - - return validateCoolifyConfig(config) -} - -// validateCoolifyConfig checks that required fields are set. -func validateCoolifyConfig(config *CoolifyConfig) (*CoolifyConfig, error) { - if config.URL == "" { - return nil, cli.Err("COOLIFY_URL is not set") - } - if config.Token == "" { - return nil, cli.Err("COOLIFY_TOKEN is not set") - } - return config, nil -} - -// TriggerDeploy triggers a deployment for the specified application. 
-func (c *CoolifyClient) TriggerDeploy(ctx context.Context, appID string, force bool) (*CoolifyDeployment, error) { - endpoint := cli.Sprintf("%s/api/v1/applications/%s/deploy", c.BaseURL, appID) - - payload := map[string]interface{}{} - if force { - payload["force"] = true - } - - body, err := json.Marshal(payload) - if err != nil { - return nil, cli.WrapVerb(err, "marshal", "request") - } - - req, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, bytes.NewReader(body)) - if err != nil { - return nil, cli.WrapVerb(err, "create", "request") - } - - c.setHeaders(req) - - resp, err := c.HTTPClient.Do(req) - if err != nil { - return nil, cli.Wrap(err, "request failed") - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusCreated && resp.StatusCode != http.StatusAccepted { - return nil, c.parseError(resp) - } - - var deployment CoolifyDeployment - if err := json.NewDecoder(resp.Body).Decode(&deployment); err != nil { - // Some Coolify versions return minimal response - return &CoolifyDeployment{ - Status: "queued", - CreatedAt: time.Now(), - }, nil - } - - return &deployment, nil -} - -// GetDeployment retrieves a specific deployment by ID. 
-func (c *CoolifyClient) GetDeployment(ctx context.Context, appID, deploymentID string) (*CoolifyDeployment, error) { - endpoint := cli.Sprintf("%s/api/v1/applications/%s/deployments/%s", c.BaseURL, appID, deploymentID) - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint, nil) - if err != nil { - return nil, cli.WrapVerb(err, "create", "request") - } - - c.setHeaders(req) - - resp, err := c.HTTPClient.Do(req) - if err != nil { - return nil, cli.Wrap(err, "request failed") - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return nil, c.parseError(resp) - } - - var deployment CoolifyDeployment - if err := json.NewDecoder(resp.Body).Decode(&deployment); err != nil { - return nil, cli.WrapVerb(err, "decode", "response") - } - - return &deployment, nil -} - -// ListDeployments retrieves deployments for an application. -func (c *CoolifyClient) ListDeployments(ctx context.Context, appID string, limit int) ([]CoolifyDeployment, error) { - endpoint := cli.Sprintf("%s/api/v1/applications/%s/deployments", c.BaseURL, appID) - if limit > 0 { - endpoint = cli.Sprintf("%s?limit=%d", endpoint, limit) - } - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint, nil) - if err != nil { - return nil, cli.WrapVerb(err, "create", "request") - } - - c.setHeaders(req) - - resp, err := c.HTTPClient.Do(req) - if err != nil { - return nil, cli.Wrap(err, "request failed") - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return nil, c.parseError(resp) - } - - var deployments []CoolifyDeployment - if err := json.NewDecoder(resp.Body).Decode(&deployments); err != nil { - return nil, cli.WrapVerb(err, "decode", "response") - } - - return deployments, nil -} - -// Rollback triggers a rollback to a previous deployment. 
-func (c *CoolifyClient) Rollback(ctx context.Context, appID, deploymentID string) (*CoolifyDeployment, error) { - endpoint := cli.Sprintf("%s/api/v1/applications/%s/rollback", c.BaseURL, appID) - - payload := map[string]interface{}{ - "deployment_id": deploymentID, - } - - body, err := json.Marshal(payload) - if err != nil { - return nil, cli.WrapVerb(err, "marshal", "request") - } - - req, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, bytes.NewReader(body)) - if err != nil { - return nil, cli.WrapVerb(err, "create", "request") - } - - c.setHeaders(req) - - resp, err := c.HTTPClient.Do(req) - if err != nil { - return nil, cli.Wrap(err, "request failed") - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusCreated && resp.StatusCode != http.StatusAccepted { - return nil, c.parseError(resp) - } - - var deployment CoolifyDeployment - if err := json.NewDecoder(resp.Body).Decode(&deployment); err != nil { - return &CoolifyDeployment{ - Status: "rolling_back", - CreatedAt: time.Now(), - }, nil - } - - return &deployment, nil -} - -// GetApp retrieves application details. -func (c *CoolifyClient) GetApp(ctx context.Context, appID string) (*CoolifyApp, error) { - endpoint := cli.Sprintf("%s/api/v1/applications/%s", c.BaseURL, appID) - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint, nil) - if err != nil { - return nil, cli.WrapVerb(err, "create", "request") - } - - c.setHeaders(req) - - resp, err := c.HTTPClient.Do(req) - if err != nil { - return nil, cli.Wrap(err, "request failed") - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return nil, c.parseError(resp) - } - - var app CoolifyApp - if err := json.NewDecoder(resp.Body).Decode(&app); err != nil { - return nil, cli.WrapVerb(err, "decode", "response") - } - - return &app, nil -} - -// setHeaders sets common headers for API requests. 
-func (c *CoolifyClient) setHeaders(req *http.Request) { - req.Header.Set("Authorization", "Bearer "+c.Token) - req.Header.Set("Content-Type", "application/json") - req.Header.Set("Accept", "application/json") -} - -// parseError extracts error information from an API response. -func (c *CoolifyClient) parseError(resp *http.Response) error { - body, _ := io.ReadAll(resp.Body) - - var errResp struct { - Message string `json:"message"` - Error string `json:"error"` - } - - if err := json.Unmarshal(body, &errResp); err == nil { - if errResp.Message != "" { - return cli.Err("API error (%d): %s", resp.StatusCode, errResp.Message) - } - if errResp.Error != "" { - return cli.Err("API error (%d): %s", resp.StatusCode, errResp.Error) - } - } - - return cli.Err("API error (%d): %s", resp.StatusCode, string(body)) -} diff --git a/pkg/php/coolify_test.go b/pkg/php/coolify_test.go deleted file mode 100644 index 3747795..0000000 --- a/pkg/php/coolify_test.go +++ /dev/null @@ -1,502 +0,0 @@ -package php - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "os" - "path/filepath" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestCoolifyClient_Good(t *testing.T) { - t.Run("creates client with correct base URL", func(t *testing.T) { - client := NewCoolifyClient("https://coolify.example.com", "token") - - assert.Equal(t, "https://coolify.example.com", client.BaseURL) - assert.Equal(t, "token", client.Token) - assert.NotNil(t, client.HTTPClient) - }) - - t.Run("strips trailing slash from base URL", func(t *testing.T) { - client := NewCoolifyClient("https://coolify.example.com/", "token") - assert.Equal(t, "https://coolify.example.com", client.BaseURL) - }) - - t.Run("http client has timeout", func(t *testing.T) { - client := NewCoolifyClient("https://coolify.example.com", "token") - assert.Equal(t, 30*time.Second, client.HTTPClient.Timeout) - }) -} - -func TestCoolifyConfig_Good(t *testing.T) { 
- t.Run("all fields accessible", func(t *testing.T) { - config := CoolifyConfig{ - URL: "https://coolify.example.com", - Token: "secret-token", - AppID: "app-123", - StagingAppID: "staging-456", - } - - assert.Equal(t, "https://coolify.example.com", config.URL) - assert.Equal(t, "secret-token", config.Token) - assert.Equal(t, "app-123", config.AppID) - assert.Equal(t, "staging-456", config.StagingAppID) - }) -} - -func TestCoolifyDeployment_Good(t *testing.T) { - t.Run("all fields accessible", func(t *testing.T) { - now := time.Now() - deployment := CoolifyDeployment{ - ID: "dep-123", - Status: "finished", - CommitSHA: "abc123", - CommitMsg: "Test commit", - Branch: "main", - CreatedAt: now, - FinishedAt: now.Add(5 * time.Minute), - Log: "Build successful", - DeployedURL: "https://app.example.com", - } - - assert.Equal(t, "dep-123", deployment.ID) - assert.Equal(t, "finished", deployment.Status) - assert.Equal(t, "abc123", deployment.CommitSHA) - assert.Equal(t, "Test commit", deployment.CommitMsg) - assert.Equal(t, "main", deployment.Branch) - }) -} - -func TestCoolifyApp_Good(t *testing.T) { - t.Run("all fields accessible", func(t *testing.T) { - app := CoolifyApp{ - ID: "app-123", - Name: "MyApp", - FQDN: "https://myapp.example.com", - Status: "running", - Repository: "https://github.com/user/repo", - Branch: "main", - Environment: "production", - } - - assert.Equal(t, "app-123", app.ID) - assert.Equal(t, "MyApp", app.Name) - assert.Equal(t, "https://myapp.example.com", app.FQDN) - assert.Equal(t, "running", app.Status) - }) -} - -func TestLoadCoolifyConfigFromFile_Good(t *testing.T) { - t.Run("loads config from .env file", func(t *testing.T) { - dir := t.TempDir() - envContent := `COOLIFY_URL=https://coolify.example.com -COOLIFY_TOKEN=secret-token -COOLIFY_APP_ID=app-123 -COOLIFY_STAGING_APP_ID=staging-456` - - err := os.WriteFile(filepath.Join(dir, ".env"), []byte(envContent), 0644) - require.NoError(t, err) - - config, err := 
LoadCoolifyConfigFromFile(filepath.Join(dir, ".env")) - assert.NoError(t, err) - assert.Equal(t, "https://coolify.example.com", config.URL) - assert.Equal(t, "secret-token", config.Token) - assert.Equal(t, "app-123", config.AppID) - assert.Equal(t, "staging-456", config.StagingAppID) - }) - - t.Run("handles quoted values", func(t *testing.T) { - dir := t.TempDir() - envContent := `COOLIFY_URL="https://coolify.example.com" -COOLIFY_TOKEN='secret-token'` - - err := os.WriteFile(filepath.Join(dir, ".env"), []byte(envContent), 0644) - require.NoError(t, err) - - config, err := LoadCoolifyConfigFromFile(filepath.Join(dir, ".env")) - assert.NoError(t, err) - assert.Equal(t, "https://coolify.example.com", config.URL) - assert.Equal(t, "secret-token", config.Token) - }) - - t.Run("ignores comments", func(t *testing.T) { - dir := t.TempDir() - envContent := `# This is a comment -COOLIFY_URL=https://coolify.example.com -# COOLIFY_TOKEN=wrong-token -COOLIFY_TOKEN=correct-token` - - err := os.WriteFile(filepath.Join(dir, ".env"), []byte(envContent), 0644) - require.NoError(t, err) - - config, err := LoadCoolifyConfigFromFile(filepath.Join(dir, ".env")) - assert.NoError(t, err) - assert.Equal(t, "correct-token", config.Token) - }) - - t.Run("ignores blank lines", func(t *testing.T) { - dir := t.TempDir() - envContent := `COOLIFY_URL=https://coolify.example.com - -COOLIFY_TOKEN=secret-token` - - err := os.WriteFile(filepath.Join(dir, ".env"), []byte(envContent), 0644) - require.NoError(t, err) - - config, err := LoadCoolifyConfigFromFile(filepath.Join(dir, ".env")) - assert.NoError(t, err) - assert.Equal(t, "https://coolify.example.com", config.URL) - }) -} - -func TestLoadCoolifyConfigFromFile_Bad(t *testing.T) { - t.Run("fails when COOLIFY_URL missing", func(t *testing.T) { - dir := t.TempDir() - envContent := `COOLIFY_TOKEN=secret-token` - - err := os.WriteFile(filepath.Join(dir, ".env"), []byte(envContent), 0644) - require.NoError(t, err) - - _, err = 
LoadCoolifyConfigFromFile(filepath.Join(dir, ".env")) - assert.Error(t, err) - assert.Contains(t, err.Error(), "COOLIFY_URL is not set") - }) - - t.Run("fails when COOLIFY_TOKEN missing", func(t *testing.T) { - dir := t.TempDir() - envContent := `COOLIFY_URL=https://coolify.example.com` - - err := os.WriteFile(filepath.Join(dir, ".env"), []byte(envContent), 0644) - require.NoError(t, err) - - _, err = LoadCoolifyConfigFromFile(filepath.Join(dir, ".env")) - assert.Error(t, err) - assert.Contains(t, err.Error(), "COOLIFY_TOKEN is not set") - }) -} - -func TestLoadCoolifyConfig_FromDirectory_Good(t *testing.T) { - t.Run("loads from directory", func(t *testing.T) { - dir := t.TempDir() - envContent := `COOLIFY_URL=https://coolify.example.com -COOLIFY_TOKEN=secret-token` - - err := os.WriteFile(filepath.Join(dir, ".env"), []byte(envContent), 0644) - require.NoError(t, err) - - config, err := LoadCoolifyConfig(dir) - assert.NoError(t, err) - assert.Equal(t, "https://coolify.example.com", config.URL) - }) -} - -func TestValidateCoolifyConfig_Bad(t *testing.T) { - t.Run("returns error for empty URL", func(t *testing.T) { - config := &CoolifyConfig{Token: "token"} - _, err := validateCoolifyConfig(config) - assert.Error(t, err) - assert.Contains(t, err.Error(), "COOLIFY_URL is not set") - }) - - t.Run("returns error for empty token", func(t *testing.T) { - config := &CoolifyConfig{URL: "https://coolify.example.com"} - _, err := validateCoolifyConfig(config) - assert.Error(t, err) - assert.Contains(t, err.Error(), "COOLIFY_TOKEN is not set") - }) -} - -func TestCoolifyClient_TriggerDeploy_Good(t *testing.T) { - t.Run("triggers deployment successfully", func(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, "/api/v1/applications/app-123/deploy", r.URL.Path) - assert.Equal(t, "POST", r.Method) - assert.Equal(t, "Bearer secret-token", r.Header.Get("Authorization")) - assert.Equal(t, 
"application/json", r.Header.Get("Content-Type")) - - resp := CoolifyDeployment{ - ID: "dep-456", - Status: "queued", - CreatedAt: time.Now(), - } - json.NewEncoder(w).Encode(resp) - })) - defer server.Close() - - client := NewCoolifyClient(server.URL, "secret-token") - deployment, err := client.TriggerDeploy(context.Background(), "app-123", false) - - assert.NoError(t, err) - assert.Equal(t, "dep-456", deployment.ID) - assert.Equal(t, "queued", deployment.Status) - }) - - t.Run("triggers deployment with force", func(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - var body map[string]interface{} - json.NewDecoder(r.Body).Decode(&body) - assert.Equal(t, true, body["force"]) - - resp := CoolifyDeployment{ID: "dep-456", Status: "queued"} - json.NewEncoder(w).Encode(resp) - })) - defer server.Close() - - client := NewCoolifyClient(server.URL, "secret-token") - _, err := client.TriggerDeploy(context.Background(), "app-123", true) - assert.NoError(t, err) - }) - - t.Run("handles minimal response", func(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - // Return an invalid JSON response to trigger the fallback - w.Write([]byte("not json")) - })) - defer server.Close() - - client := NewCoolifyClient(server.URL, "secret-token") - deployment, err := client.TriggerDeploy(context.Background(), "app-123", false) - - assert.NoError(t, err) - // The fallback response should be returned - assert.Equal(t, "queued", deployment.Status) - }) -} - -func TestCoolifyClient_TriggerDeploy_Bad(t *testing.T) { - t.Run("fails on HTTP error", func(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusInternalServerError) - json.NewEncoder(w).Encode(map[string]string{"message": "Internal error"}) - })) - defer server.Close() - - client := NewCoolifyClient(server.URL, "secret-token") - 
_, err := client.TriggerDeploy(context.Background(), "app-123", false) - - assert.Error(t, err) - assert.Contains(t, err.Error(), "API error") - }) -} - -func TestCoolifyClient_GetDeployment_Good(t *testing.T) { - t.Run("gets deployment details", func(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, "/api/v1/applications/app-123/deployments/dep-456", r.URL.Path) - assert.Equal(t, "GET", r.Method) - - resp := CoolifyDeployment{ - ID: "dep-456", - Status: "finished", - CommitSHA: "abc123", - Branch: "main", - } - json.NewEncoder(w).Encode(resp) - })) - defer server.Close() - - client := NewCoolifyClient(server.URL, "secret-token") - deployment, err := client.GetDeployment(context.Background(), "app-123", "dep-456") - - assert.NoError(t, err) - assert.Equal(t, "dep-456", deployment.ID) - assert.Equal(t, "finished", deployment.Status) - assert.Equal(t, "abc123", deployment.CommitSHA) - }) -} - -func TestCoolifyClient_GetDeployment_Bad(t *testing.T) { - t.Run("fails on 404", func(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusNotFound) - json.NewEncoder(w).Encode(map[string]string{"error": "Not found"}) - })) - defer server.Close() - - client := NewCoolifyClient(server.URL, "secret-token") - _, err := client.GetDeployment(context.Background(), "app-123", "dep-456") - - assert.Error(t, err) - assert.Contains(t, err.Error(), "Not found") - }) -} - -func TestCoolifyClient_ListDeployments_Good(t *testing.T) { - t.Run("lists deployments", func(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, "/api/v1/applications/app-123/deployments", r.URL.Path) - assert.Equal(t, "10", r.URL.Query().Get("limit")) - - resp := []CoolifyDeployment{ - {ID: "dep-1", Status: "finished"}, - {ID: "dep-2", Status: "failed"}, - } - 
json.NewEncoder(w).Encode(resp) - })) - defer server.Close() - - client := NewCoolifyClient(server.URL, "secret-token") - deployments, err := client.ListDeployments(context.Background(), "app-123", 10) - - assert.NoError(t, err) - assert.Len(t, deployments, 2) - assert.Equal(t, "dep-1", deployments[0].ID) - assert.Equal(t, "dep-2", deployments[1].ID) - }) - - t.Run("lists without limit", func(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, "", r.URL.Query().Get("limit")) - json.NewEncoder(w).Encode([]CoolifyDeployment{}) - })) - defer server.Close() - - client := NewCoolifyClient(server.URL, "secret-token") - _, err := client.ListDeployments(context.Background(), "app-123", 0) - assert.NoError(t, err) - }) -} - -func TestCoolifyClient_Rollback_Good(t *testing.T) { - t.Run("triggers rollback", func(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, "/api/v1/applications/app-123/rollback", r.URL.Path) - assert.Equal(t, "POST", r.Method) - - var body map[string]string - json.NewDecoder(r.Body).Decode(&body) - assert.Equal(t, "dep-old", body["deployment_id"]) - - resp := CoolifyDeployment{ - ID: "dep-new", - Status: "rolling_back", - } - json.NewEncoder(w).Encode(resp) - })) - defer server.Close() - - client := NewCoolifyClient(server.URL, "secret-token") - deployment, err := client.Rollback(context.Background(), "app-123", "dep-old") - - assert.NoError(t, err) - assert.Equal(t, "dep-new", deployment.ID) - assert.Equal(t, "rolling_back", deployment.Status) - }) -} - -func TestCoolifyClient_GetApp_Good(t *testing.T) { - t.Run("gets app details", func(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, "/api/v1/applications/app-123", r.URL.Path) - assert.Equal(t, "GET", r.Method) - - resp := CoolifyApp{ - ID: "app-123", - Name: "MyApp", - 
FQDN: "https://myapp.example.com", - Status: "running", - } - json.NewEncoder(w).Encode(resp) - })) - defer server.Close() - - client := NewCoolifyClient(server.URL, "secret-token") - app, err := client.GetApp(context.Background(), "app-123") - - assert.NoError(t, err) - assert.Equal(t, "app-123", app.ID) - assert.Equal(t, "MyApp", app.Name) - assert.Equal(t, "https://myapp.example.com", app.FQDN) - }) -} - -func TestCoolifyClient_SetHeaders(t *testing.T) { - t.Run("sets all required headers", func(t *testing.T) { - client := NewCoolifyClient("https://coolify.example.com", "my-token") - req, _ := http.NewRequest("GET", "https://coolify.example.com", nil) - - client.setHeaders(req) - - assert.Equal(t, "Bearer my-token", req.Header.Get("Authorization")) - assert.Equal(t, "application/json", req.Header.Get("Content-Type")) - assert.Equal(t, "application/json", req.Header.Get("Accept")) - }) -} - -func TestCoolifyClient_ParseError(t *testing.T) { - t.Run("parses message field", func(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusBadRequest) - json.NewEncoder(w).Encode(map[string]string{"message": "Bad request message"}) - })) - defer server.Close() - - client := NewCoolifyClient(server.URL, "token") - _, err := client.GetApp(context.Background(), "app-123") - - assert.Error(t, err) - assert.Contains(t, err.Error(), "Bad request message") - }) - - t.Run("parses error field", func(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusBadRequest) - json.NewEncoder(w).Encode(map[string]string{"error": "Error message"}) - })) - defer server.Close() - - client := NewCoolifyClient(server.URL, "token") - _, err := client.GetApp(context.Background(), "app-123") - - assert.Error(t, err) - assert.Contains(t, err.Error(), "Error message") - }) - - t.Run("returns raw body when no JSON fields", func(t 
*testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusInternalServerError) - w.Write([]byte("Raw error message")) - })) - defer server.Close() - - client := NewCoolifyClient(server.URL, "token") - _, err := client.GetApp(context.Background(), "app-123") - - assert.Error(t, err) - assert.Contains(t, err.Error(), "Raw error message") - }) -} - -func TestEnvironmentVariablePriority(t *testing.T) { - t.Run("env vars take precedence over .env file", func(t *testing.T) { - dir := t.TempDir() - envContent := `COOLIFY_URL=https://from-file.com -COOLIFY_TOKEN=file-token` - - err := os.WriteFile(filepath.Join(dir, ".env"), []byte(envContent), 0644) - require.NoError(t, err) - - // Set environment variables - origURL := os.Getenv("COOLIFY_URL") - origToken := os.Getenv("COOLIFY_TOKEN") - defer func() { - os.Setenv("COOLIFY_URL", origURL) - os.Setenv("COOLIFY_TOKEN", origToken) - }() - - os.Setenv("COOLIFY_URL", "https://from-env.com") - os.Setenv("COOLIFY_TOKEN", "env-token") - - config, err := LoadCoolifyConfig(dir) - assert.NoError(t, err) - // Environment variables should take precedence - assert.Equal(t, "https://from-env.com", config.URL) - assert.Equal(t, "env-token", config.Token) - }) -} diff --git a/pkg/php/deploy.go b/pkg/php/deploy.go deleted file mode 100644 index 220c262..0000000 --- a/pkg/php/deploy.go +++ /dev/null @@ -1,407 +0,0 @@ -package php - -import ( - "context" - "time" - - "github.com/host-uk/core/pkg/cli" -) - -// Environment represents a deployment environment. -type Environment string - -const ( - // EnvProduction is the production environment. - EnvProduction Environment = "production" - // EnvStaging is the staging environment. - EnvStaging Environment = "staging" -) - -// DeployOptions configures a deployment. -type DeployOptions struct { - // Dir is the project directory containing .env config. 
- Dir string - - // Environment is the target environment (production or staging). - Environment Environment - - // Force triggers a deployment even if no changes are detected. - Force bool - - // Wait blocks until deployment completes. - Wait bool - - // WaitTimeout is the maximum time to wait for deployment. - // Defaults to 10 minutes. - WaitTimeout time.Duration - - // PollInterval is how often to check deployment status when waiting. - // Defaults to 5 seconds. - PollInterval time.Duration -} - -// StatusOptions configures a status check. -type StatusOptions struct { - // Dir is the project directory containing .env config. - Dir string - - // Environment is the target environment (production or staging). - Environment Environment - - // DeploymentID is a specific deployment to check. - // If empty, returns the latest deployment. - DeploymentID string -} - -// RollbackOptions configures a rollback. -type RollbackOptions struct { - // Dir is the project directory containing .env config. - Dir string - - // Environment is the target environment (production or staging). - Environment Environment - - // DeploymentID is the deployment to rollback to. - // If empty, rolls back to the previous successful deployment. - DeploymentID string - - // Wait blocks until rollback completes. - Wait bool - - // WaitTimeout is the maximum time to wait for rollback. - WaitTimeout time.Duration -} - -// DeploymentStatus represents the status of a deployment. -type DeploymentStatus struct { - // ID is the deployment identifier. - ID string - - // Status is the current deployment status. - // Values: queued, building, deploying, finished, failed, cancelled - Status string - - // URL is the deployed application URL. - URL string - - // Commit is the git commit SHA. - Commit string - - // CommitMessage is the git commit message. - CommitMessage string - - // Branch is the git branch. - Branch string - - // StartedAt is when the deployment started. 
- StartedAt time.Time - - // CompletedAt is when the deployment completed. - CompletedAt time.Time - - // Log contains deployment logs. - Log string -} - -// Deploy triggers a deployment to Coolify. -func Deploy(ctx context.Context, opts DeployOptions) (*DeploymentStatus, error) { - if opts.Dir == "" { - opts.Dir = "." - } - if opts.Environment == "" { - opts.Environment = EnvProduction - } - if opts.WaitTimeout == 0 { - opts.WaitTimeout = 10 * time.Minute - } - if opts.PollInterval == 0 { - opts.PollInterval = 5 * time.Second - } - - // Load config - config, err := LoadCoolifyConfig(opts.Dir) - if err != nil { - return nil, cli.WrapVerb(err, "load", "Coolify config") - } - - // Get app ID for environment - appID := getAppIDForEnvironment(config, opts.Environment) - if appID == "" { - return nil, cli.Err("no app ID configured for %s environment", opts.Environment) - } - - // Create client - client := NewCoolifyClient(config.URL, config.Token) - - // Trigger deployment - deployment, err := client.TriggerDeploy(ctx, appID, opts.Force) - if err != nil { - return nil, cli.WrapVerb(err, "trigger", "deployment") - } - - status := convertDeployment(deployment) - - // Wait for completion if requested - if opts.Wait && deployment.ID != "" { - status, err = waitForDeployment(ctx, client, appID, deployment.ID, opts.WaitTimeout, opts.PollInterval) - if err != nil { - return status, err - } - } - - // Get app info for URL - app, err := client.GetApp(ctx, appID) - if err == nil && app.FQDN != "" { - status.URL = app.FQDN - } - - return status, nil -} - -// DeployStatus retrieves the status of a deployment. -func DeployStatus(ctx context.Context, opts StatusOptions) (*DeploymentStatus, error) { - if opts.Dir == "" { - opts.Dir = "." 
- } - if opts.Environment == "" { - opts.Environment = EnvProduction - } - - // Load config - config, err := LoadCoolifyConfig(opts.Dir) - if err != nil { - return nil, cli.WrapVerb(err, "load", "Coolify config") - } - - // Get app ID for environment - appID := getAppIDForEnvironment(config, opts.Environment) - if appID == "" { - return nil, cli.Err("no app ID configured for %s environment", opts.Environment) - } - - // Create client - client := NewCoolifyClient(config.URL, config.Token) - - var deployment *CoolifyDeployment - - if opts.DeploymentID != "" { - // Get specific deployment - deployment, err = client.GetDeployment(ctx, appID, opts.DeploymentID) - if err != nil { - return nil, cli.WrapVerb(err, "get", "deployment") - } - } else { - // Get latest deployment - deployments, err := client.ListDeployments(ctx, appID, 1) - if err != nil { - return nil, cli.WrapVerb(err, "list", "deployments") - } - if len(deployments) == 0 { - return nil, cli.Err("no deployments found") - } - deployment = &deployments[0] - } - - status := convertDeployment(deployment) - - // Get app info for URL - app, err := client.GetApp(ctx, appID) - if err == nil && app.FQDN != "" { - status.URL = app.FQDN - } - - return status, nil -} - -// Rollback triggers a rollback to a previous deployment. -func Rollback(ctx context.Context, opts RollbackOptions) (*DeploymentStatus, error) { - if opts.Dir == "" { - opts.Dir = "." 
- } - if opts.Environment == "" { - opts.Environment = EnvProduction - } - if opts.WaitTimeout == 0 { - opts.WaitTimeout = 10 * time.Minute - } - - // Load config - config, err := LoadCoolifyConfig(opts.Dir) - if err != nil { - return nil, cli.WrapVerb(err, "load", "Coolify config") - } - - // Get app ID for environment - appID := getAppIDForEnvironment(config, opts.Environment) - if appID == "" { - return nil, cli.Err("no app ID configured for %s environment", opts.Environment) - } - - // Create client - client := NewCoolifyClient(config.URL, config.Token) - - // Find deployment to rollback to - deploymentID := opts.DeploymentID - if deploymentID == "" { - // Find previous successful deployment - deployments, err := client.ListDeployments(ctx, appID, 10) - if err != nil { - return nil, cli.WrapVerb(err, "list", "deployments") - } - - // Skip the first (current) deployment, find the last successful one - for i, d := range deployments { - if i == 0 { - continue // Skip current deployment - } - if d.Status == "finished" || d.Status == "success" { - deploymentID = d.ID - break - } - } - - if deploymentID == "" { - return nil, cli.Err("no previous successful deployment found to rollback to") - } - } - - // Trigger rollback - deployment, err := client.Rollback(ctx, appID, deploymentID) - if err != nil { - return nil, cli.WrapVerb(err, "trigger", "rollback") - } - - status := convertDeployment(deployment) - - // Wait for completion if requested - if opts.Wait && deployment.ID != "" { - status, err = waitForDeployment(ctx, client, appID, deployment.ID, opts.WaitTimeout, 5*time.Second) - if err != nil { - return status, err - } - } - - return status, nil -} - -// ListDeployments retrieves recent deployments. -func ListDeployments(ctx context.Context, dir string, env Environment, limit int) ([]DeploymentStatus, error) { - if dir == "" { - dir = "." 
- } - if env == "" { - env = EnvProduction - } - if limit == 0 { - limit = 10 - } - - // Load config - config, err := LoadCoolifyConfig(dir) - if err != nil { - return nil, cli.WrapVerb(err, "load", "Coolify config") - } - - // Get app ID for environment - appID := getAppIDForEnvironment(config, env) - if appID == "" { - return nil, cli.Err("no app ID configured for %s environment", env) - } - - // Create client - client := NewCoolifyClient(config.URL, config.Token) - - deployments, err := client.ListDeployments(ctx, appID, limit) - if err != nil { - return nil, cli.WrapVerb(err, "list", "deployments") - } - - result := make([]DeploymentStatus, len(deployments)) - for i, d := range deployments { - result[i] = *convertDeployment(&d) - } - - return result, nil -} - -// getAppIDForEnvironment returns the app ID for the given environment. -func getAppIDForEnvironment(config *CoolifyConfig, env Environment) string { - switch env { - case EnvStaging: - if config.StagingAppID != "" { - return config.StagingAppID - } - return config.AppID // Fallback to production - default: - return config.AppID - } -} - -// convertDeployment converts a CoolifyDeployment to DeploymentStatus. -func convertDeployment(d *CoolifyDeployment) *DeploymentStatus { - return &DeploymentStatus{ - ID: d.ID, - Status: d.Status, - URL: d.DeployedURL, - Commit: d.CommitSHA, - CommitMessage: d.CommitMsg, - Branch: d.Branch, - StartedAt: d.CreatedAt, - CompletedAt: d.FinishedAt, - Log: d.Log, - } -} - -// waitForDeployment polls for deployment completion. 
-func waitForDeployment(ctx context.Context, client *CoolifyClient, appID, deploymentID string, timeout, interval time.Duration) (*DeploymentStatus, error) { - deadline := time.Now().Add(timeout) - - for time.Now().Before(deadline) { - select { - case <-ctx.Done(): - return nil, ctx.Err() - default: - } - - deployment, err := client.GetDeployment(ctx, appID, deploymentID) - if err != nil { - return nil, cli.WrapVerb(err, "get", "deployment status") - } - - status := convertDeployment(deployment) - - // Check if deployment is complete - switch deployment.Status { - case "finished", "success": - return status, nil - case "failed", "error": - return status, cli.Err("deployment failed: %s", deployment.Status) - case "cancelled": - return status, cli.Err("deployment was cancelled") - } - - // Still in progress, wait and retry - select { - case <-ctx.Done(): - return status, ctx.Err() - case <-time.After(interval): - } - } - - return nil, cli.Err("deployment timed out after %v", timeout) -} - -// IsDeploymentComplete returns true if the status indicates completion. -func IsDeploymentComplete(status string) bool { - switch status { - case "finished", "success", "failed", "error", "cancelled": - return true - default: - return false - } -} - -// IsDeploymentSuccessful returns true if the status indicates success. 
-func IsDeploymentSuccessful(status string) bool { - return status == "finished" || status == "success" -} diff --git a/pkg/php/deploy_internal_test.go b/pkg/php/deploy_internal_test.go deleted file mode 100644 index 9362aaf..0000000 --- a/pkg/php/deploy_internal_test.go +++ /dev/null @@ -1,221 +0,0 @@ -package php - -import ( - "testing" - "time" - - "github.com/stretchr/testify/assert" -) - -func TestConvertDeployment_Good(t *testing.T) { - t.Run("converts all fields", func(t *testing.T) { - now := time.Now() - coolify := &CoolifyDeployment{ - ID: "dep-123", - Status: "finished", - CommitSHA: "abc123", - CommitMsg: "Test commit", - Branch: "main", - CreatedAt: now, - FinishedAt: now.Add(5 * time.Minute), - Log: "Build successful", - DeployedURL: "https://app.example.com", - } - - status := convertDeployment(coolify) - - assert.Equal(t, "dep-123", status.ID) - assert.Equal(t, "finished", status.Status) - assert.Equal(t, "https://app.example.com", status.URL) - assert.Equal(t, "abc123", status.Commit) - assert.Equal(t, "Test commit", status.CommitMessage) - assert.Equal(t, "main", status.Branch) - assert.Equal(t, now, status.StartedAt) - assert.Equal(t, now.Add(5*time.Minute), status.CompletedAt) - assert.Equal(t, "Build successful", status.Log) - }) - - t.Run("handles empty deployment", func(t *testing.T) { - coolify := &CoolifyDeployment{} - status := convertDeployment(coolify) - - assert.Empty(t, status.ID) - assert.Empty(t, status.Status) - }) -} - -func TestDeploymentStatus_Struct_Good(t *testing.T) { - t.Run("all fields accessible", func(t *testing.T) { - now := time.Now() - status := DeploymentStatus{ - ID: "dep-123", - Status: "finished", - URL: "https://app.example.com", - Commit: "abc123", - CommitMessage: "Test commit", - Branch: "main", - StartedAt: now, - CompletedAt: now.Add(5 * time.Minute), - Log: "Build log", - } - - assert.Equal(t, "dep-123", status.ID) - assert.Equal(t, "finished", status.Status) - assert.Equal(t, "https://app.example.com", 
status.URL) - assert.Equal(t, "abc123", status.Commit) - assert.Equal(t, "Test commit", status.CommitMessage) - assert.Equal(t, "main", status.Branch) - assert.Equal(t, "Build log", status.Log) - }) -} - -func TestDeployOptions_Struct_Good(t *testing.T) { - t.Run("all fields accessible", func(t *testing.T) { - opts := DeployOptions{ - Dir: "/project", - Environment: EnvProduction, - Force: true, - Wait: true, - WaitTimeout: 10 * time.Minute, - PollInterval: 5 * time.Second, - } - - assert.Equal(t, "/project", opts.Dir) - assert.Equal(t, EnvProduction, opts.Environment) - assert.True(t, opts.Force) - assert.True(t, opts.Wait) - assert.Equal(t, 10*time.Minute, opts.WaitTimeout) - assert.Equal(t, 5*time.Second, opts.PollInterval) - }) -} - -func TestStatusOptions_Struct_Good(t *testing.T) { - t.Run("all fields accessible", func(t *testing.T) { - opts := StatusOptions{ - Dir: "/project", - Environment: EnvStaging, - DeploymentID: "dep-123", - } - - assert.Equal(t, "/project", opts.Dir) - assert.Equal(t, EnvStaging, opts.Environment) - assert.Equal(t, "dep-123", opts.DeploymentID) - }) -} - -func TestRollbackOptions_Struct_Good(t *testing.T) { - t.Run("all fields accessible", func(t *testing.T) { - opts := RollbackOptions{ - Dir: "/project", - Environment: EnvProduction, - DeploymentID: "dep-old", - Wait: true, - WaitTimeout: 5 * time.Minute, - } - - assert.Equal(t, "/project", opts.Dir) - assert.Equal(t, EnvProduction, opts.Environment) - assert.Equal(t, "dep-old", opts.DeploymentID) - assert.True(t, opts.Wait) - assert.Equal(t, 5*time.Minute, opts.WaitTimeout) - }) -} - -func TestEnvironment_Constants(t *testing.T) { - t.Run("constants are defined", func(t *testing.T) { - assert.Equal(t, Environment("production"), EnvProduction) - assert.Equal(t, Environment("staging"), EnvStaging) - }) -} - -func TestGetAppIDForEnvironment_Edge(t *testing.T) { - t.Run("staging without staging ID falls back to production", func(t *testing.T) { - config := &CoolifyConfig{ - AppID: 
"prod-123", - // No StagingAppID set - } - - id := getAppIDForEnvironment(config, EnvStaging) - assert.Equal(t, "prod-123", id) - }) - - t.Run("staging with staging ID uses staging", func(t *testing.T) { - config := &CoolifyConfig{ - AppID: "prod-123", - StagingAppID: "staging-456", - } - - id := getAppIDForEnvironment(config, EnvStaging) - assert.Equal(t, "staging-456", id) - }) - - t.Run("production uses production ID", func(t *testing.T) { - config := &CoolifyConfig{ - AppID: "prod-123", - StagingAppID: "staging-456", - } - - id := getAppIDForEnvironment(config, EnvProduction) - assert.Equal(t, "prod-123", id) - }) - - t.Run("unknown environment uses production", func(t *testing.T) { - config := &CoolifyConfig{ - AppID: "prod-123", - } - - id := getAppIDForEnvironment(config, "unknown") - assert.Equal(t, "prod-123", id) - }) -} - -func TestIsDeploymentComplete_Edge(t *testing.T) { - tests := []struct { - status string - expected bool - }{ - {"finished", true}, - {"success", true}, - {"failed", true}, - {"error", true}, - {"cancelled", true}, - {"queued", false}, - {"building", false}, - {"deploying", false}, - {"pending", false}, - {"rolling_back", false}, - {"", false}, - {"unknown", false}, - } - - for _, tt := range tests { - t.Run(tt.status, func(t *testing.T) { - result := IsDeploymentComplete(tt.status) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestIsDeploymentSuccessful_Edge(t *testing.T) { - tests := []struct { - status string - expected bool - }{ - {"finished", true}, - {"success", true}, - {"failed", false}, - {"error", false}, - {"cancelled", false}, - {"queued", false}, - {"building", false}, - {"deploying", false}, - {"", false}, - } - - for _, tt := range tests { - t.Run(tt.status, func(t *testing.T) { - result := IsDeploymentSuccessful(tt.status) - assert.Equal(t, tt.expected, result) - }) - } -} diff --git a/pkg/php/deploy_test.go b/pkg/php/deploy_test.go deleted file mode 100644 index 436d457..0000000 --- 
a/pkg/php/deploy_test.go +++ /dev/null @@ -1,257 +0,0 @@ -package php - -import ( - "os" - "path/filepath" - "testing" -) - -func TestLoadCoolifyConfig_Good(t *testing.T) { - tests := []struct { - name string - envContent string - wantURL string - wantToken string - wantAppID string - wantStaging string - }{ - { - name: "all values set", - envContent: `COOLIFY_URL=https://coolify.example.com -COOLIFY_TOKEN=secret-token -COOLIFY_APP_ID=app-123 -COOLIFY_STAGING_APP_ID=staging-456`, - wantURL: "https://coolify.example.com", - wantToken: "secret-token", - wantAppID: "app-123", - wantStaging: "staging-456", - }, - { - name: "quoted values", - envContent: `COOLIFY_URL="https://coolify.example.com" -COOLIFY_TOKEN='secret-token' -COOLIFY_APP_ID="app-123"`, - wantURL: "https://coolify.example.com", - wantToken: "secret-token", - wantAppID: "app-123", - }, - { - name: "with comments and blank lines", - envContent: `# Coolify configuration -COOLIFY_URL=https://coolify.example.com - -# API token -COOLIFY_TOKEN=secret-token -COOLIFY_APP_ID=app-123 -# COOLIFY_STAGING_APP_ID=not-this`, - wantURL: "https://coolify.example.com", - wantToken: "secret-token", - wantAppID: "app-123", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // Create temp directory - dir := t.TempDir() - envPath := filepath.Join(dir, ".env") - - // Write .env file - if err := os.WriteFile(envPath, []byte(tt.envContent), 0644); err != nil { - t.Fatalf("failed to write .env: %v", err) - } - - // Load config - config, err := LoadCoolifyConfig(dir) - if err != nil { - t.Fatalf("LoadCoolifyConfig() error = %v", err) - } - - if config.URL != tt.wantURL { - t.Errorf("URL = %q, want %q", config.URL, tt.wantURL) - } - if config.Token != tt.wantToken { - t.Errorf("Token = %q, want %q", config.Token, tt.wantToken) - } - if config.AppID != tt.wantAppID { - t.Errorf("AppID = %q, want %q", config.AppID, tt.wantAppID) - } - if tt.wantStaging != "" && config.StagingAppID != tt.wantStaging { - 
t.Errorf("StagingAppID = %q, want %q", config.StagingAppID, tt.wantStaging) - } - }) - } -} - -func TestLoadCoolifyConfig_Bad(t *testing.T) { - tests := []struct { - name string - envContent string - wantErr string - }{ - { - name: "missing URL", - envContent: "COOLIFY_TOKEN=secret", - wantErr: "COOLIFY_URL is not set", - }, - { - name: "missing token", - envContent: "COOLIFY_URL=https://coolify.example.com", - wantErr: "COOLIFY_TOKEN is not set", - }, - { - name: "empty values", - envContent: "COOLIFY_URL=\nCOOLIFY_TOKEN=", - wantErr: "COOLIFY_URL is not set", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // Create temp directory - dir := t.TempDir() - envPath := filepath.Join(dir, ".env") - - // Write .env file - if err := os.WriteFile(envPath, []byte(tt.envContent), 0644); err != nil { - t.Fatalf("failed to write .env: %v", err) - } - - // Load config - _, err := LoadCoolifyConfig(dir) - if err == nil { - t.Fatal("LoadCoolifyConfig() expected error, got nil") - } - - if err.Error() != tt.wantErr { - t.Errorf("error = %q, want %q", err.Error(), tt.wantErr) - } - }) - } -} - -func TestGetAppIDForEnvironment_Good(t *testing.T) { - config := &CoolifyConfig{ - URL: "https://coolify.example.com", - Token: "token", - AppID: "prod-123", - StagingAppID: "staging-456", - } - - tests := []struct { - name string - env Environment - wantID string - }{ - { - name: "production environment", - env: EnvProduction, - wantID: "prod-123", - }, - { - name: "staging environment", - env: EnvStaging, - wantID: "staging-456", - }, - { - name: "empty defaults to production", - env: "", - wantID: "prod-123", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - id := getAppIDForEnvironment(config, tt.env) - if id != tt.wantID { - t.Errorf("getAppIDForEnvironment() = %q, want %q", id, tt.wantID) - } - }) - } -} - -func TestGetAppIDForEnvironment_FallbackToProduction(t *testing.T) { - config := &CoolifyConfig{ - URL: 
"https://coolify.example.com", - Token: "token", - AppID: "prod-123", - // No staging app ID - } - - // Staging should fall back to production - id := getAppIDForEnvironment(config, EnvStaging) - if id != "prod-123" { - t.Errorf("getAppIDForEnvironment(EnvStaging) = %q, want %q (should fallback)", id, "prod-123") - } -} - -func TestIsDeploymentComplete_Good(t *testing.T) { - completeStatuses := []string{"finished", "success", "failed", "error", "cancelled"} - for _, status := range completeStatuses { - if !IsDeploymentComplete(status) { - t.Errorf("IsDeploymentComplete(%q) = false, want true", status) - } - } - - incompleteStatuses := []string{"queued", "building", "deploying", "pending", "rolling_back"} - for _, status := range incompleteStatuses { - if IsDeploymentComplete(status) { - t.Errorf("IsDeploymentComplete(%q) = true, want false", status) - } - } -} - -func TestIsDeploymentSuccessful_Good(t *testing.T) { - successStatuses := []string{"finished", "success"} - for _, status := range successStatuses { - if !IsDeploymentSuccessful(status) { - t.Errorf("IsDeploymentSuccessful(%q) = false, want true", status) - } - } - - failedStatuses := []string{"failed", "error", "cancelled", "queued", "building"} - for _, status := range failedStatuses { - if IsDeploymentSuccessful(status) { - t.Errorf("IsDeploymentSuccessful(%q) = true, want false", status) - } - } -} - -func TestNewCoolifyClient_Good(t *testing.T) { - tests := []struct { - name string - baseURL string - wantBaseURL string - }{ - { - name: "URL without trailing slash", - baseURL: "https://coolify.example.com", - wantBaseURL: "https://coolify.example.com", - }, - { - name: "URL with trailing slash", - baseURL: "https://coolify.example.com/", - wantBaseURL: "https://coolify.example.com", - }, - { - name: "URL with api path", - baseURL: "https://coolify.example.com/api/", - wantBaseURL: "https://coolify.example.com/api", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - client 
:= NewCoolifyClient(tt.baseURL, "token") - if client.BaseURL != tt.wantBaseURL { - t.Errorf("BaseURL = %q, want %q", client.BaseURL, tt.wantBaseURL) - } - if client.Token != "token" { - t.Errorf("Token = %q, want %q", client.Token, "token") - } - if client.HTTPClient == nil { - t.Error("HTTPClient is nil") - } - }) - } -} diff --git a/pkg/php/detect.go b/pkg/php/detect.go deleted file mode 100644 index 3afc0b5..0000000 --- a/pkg/php/detect.go +++ /dev/null @@ -1,288 +0,0 @@ -package php - -import ( - "bufio" - "encoding/json" - "os" - "path/filepath" - "strings" -) - -// DetectedService represents a service that was detected in a Laravel project. -type DetectedService string - -const ( - ServiceFrankenPHP DetectedService = "frankenphp" - ServiceVite DetectedService = "vite" - ServiceHorizon DetectedService = "horizon" - ServiceReverb DetectedService = "reverb" - ServiceRedis DetectedService = "redis" -) - -// IsLaravelProject checks if the given directory is a Laravel project. -// It looks for the presence of artisan file and laravel in composer.json. 
-func IsLaravelProject(dir string) bool { - // Check for artisan file - artisanPath := filepath.Join(dir, "artisan") - if _, err := os.Stat(artisanPath); os.IsNotExist(err) { - return false - } - - // Check composer.json for laravel/framework - composerPath := filepath.Join(dir, "composer.json") - data, err := os.ReadFile(composerPath) - if err != nil { - return false - } - - var composer struct { - Require map[string]string `json:"require"` - RequireDev map[string]string `json:"require-dev"` - } - - if err := json.Unmarshal(data, &composer); err != nil { - return false - } - - // Check for laravel/framework in require - if _, ok := composer.Require["laravel/framework"]; ok { - return true - } - - // Also check require-dev (less common but possible) - if _, ok := composer.RequireDev["laravel/framework"]; ok { - return true - } - - return false -} - -// IsFrankenPHPProject checks if the project is configured for FrankenPHP. -// It looks for laravel/octane with frankenphp driver. -func IsFrankenPHPProject(dir string) bool { - // Check composer.json for laravel/octane - composerPath := filepath.Join(dir, "composer.json") - data, err := os.ReadFile(composerPath) - if err != nil { - return false - } - - var composer struct { - Require map[string]string `json:"require"` - } - - if err := json.Unmarshal(data, &composer); err != nil { - return false - } - - if _, ok := composer.Require["laravel/octane"]; !ok { - return false - } - - // Check octane config for frankenphp - configPath := filepath.Join(dir, "config", "octane.php") - if _, err := os.Stat(configPath); os.IsNotExist(err) { - // If no config exists but octane is installed, assume frankenphp - return true - } - - configData, err := os.ReadFile(configPath) - if err != nil { - return true // Assume frankenphp if we can't read config - } - - // Look for frankenphp in the config - return strings.Contains(string(configData), "frankenphp") -} - -// DetectServices detects which services are needed based on project files. 
-func DetectServices(dir string) []DetectedService { - services := []DetectedService{} - - // FrankenPHP/Octane is always needed for a Laravel dev environment - if IsFrankenPHPProject(dir) || IsLaravelProject(dir) { - services = append(services, ServiceFrankenPHP) - } - - // Check for Vite - if hasVite(dir) { - services = append(services, ServiceVite) - } - - // Check for Horizon - if hasHorizon(dir) { - services = append(services, ServiceHorizon) - } - - // Check for Reverb - if hasReverb(dir) { - services = append(services, ServiceReverb) - } - - // Check for Redis - if needsRedis(dir) { - services = append(services, ServiceRedis) - } - - return services -} - -// hasVite checks if the project uses Vite. -func hasVite(dir string) bool { - viteConfigs := []string{ - "vite.config.js", - "vite.config.ts", - "vite.config.mjs", - "vite.config.mts", - } - - for _, config := range viteConfigs { - if _, err := os.Stat(filepath.Join(dir, config)); err == nil { - return true - } - } - - return false -} - -// hasHorizon checks if Laravel Horizon is configured. -func hasHorizon(dir string) bool { - horizonConfig := filepath.Join(dir, "config", "horizon.php") - _, err := os.Stat(horizonConfig) - return err == nil -} - -// hasReverb checks if Laravel Reverb is configured. -func hasReverb(dir string) bool { - reverbConfig := filepath.Join(dir, "config", "reverb.php") - _, err := os.Stat(reverbConfig) - return err == nil -} - -// needsRedis checks if the project uses Redis based on .env configuration. 
-func needsRedis(dir string) bool { - envPath := filepath.Join(dir, ".env") - file, err := os.Open(envPath) - if err != nil { - return false - } - defer file.Close() - - scanner := bufio.NewScanner(file) - for scanner.Scan() { - line := strings.TrimSpace(scanner.Text()) - if strings.HasPrefix(line, "#") { - continue - } - - // Check for Redis-related environment variables - redisIndicators := []string{ - "REDIS_HOST=", - "CACHE_DRIVER=redis", - "QUEUE_CONNECTION=redis", - "SESSION_DRIVER=redis", - "BROADCAST_DRIVER=redis", - } - - for _, indicator := range redisIndicators { - if strings.HasPrefix(line, indicator) { - // Check if it's set to localhost or 127.0.0.1 - if strings.Contains(line, "127.0.0.1") || strings.Contains(line, "localhost") || - indicator != "REDIS_HOST=" { - return true - } - } - } - } - - return false -} - -// DetectPackageManager detects which package manager is used in the project. -// Returns "npm", "pnpm", "yarn", or "bun". -func DetectPackageManager(dir string) string { - // Check for lock files in order of preference - lockFiles := []struct { - file string - manager string - }{ - {"bun.lockb", "bun"}, - {"pnpm-lock.yaml", "pnpm"}, - {"yarn.lock", "yarn"}, - {"package-lock.json", "npm"}, - } - - for _, lf := range lockFiles { - if _, err := os.Stat(filepath.Join(dir, lf.file)); err == nil { - return lf.manager - } - } - - // Default to npm if no lock file found - return "npm" -} - -// GetLaravelAppName extracts the application name from Laravel's .env file. 
-func GetLaravelAppName(dir string) string { - envPath := filepath.Join(dir, ".env") - file, err := os.Open(envPath) - if err != nil { - return "" - } - defer file.Close() - - scanner := bufio.NewScanner(file) - for scanner.Scan() { - line := strings.TrimSpace(scanner.Text()) - if strings.HasPrefix(line, "APP_NAME=") { - value := strings.TrimPrefix(line, "APP_NAME=") - // Remove quotes if present - value = strings.Trim(value, `"'`) - return value - } - } - - return "" -} - -// GetLaravelAppURL extracts the application URL from Laravel's .env file. -func GetLaravelAppURL(dir string) string { - envPath := filepath.Join(dir, ".env") - file, err := os.Open(envPath) - if err != nil { - return "" - } - defer file.Close() - - scanner := bufio.NewScanner(file) - for scanner.Scan() { - line := strings.TrimSpace(scanner.Text()) - if strings.HasPrefix(line, "APP_URL=") { - value := strings.TrimPrefix(line, "APP_URL=") - // Remove quotes if present - value = strings.Trim(value, `"'`) - return value - } - } - - return "" -} - -// ExtractDomainFromURL extracts the domain from a URL string. 
-func ExtractDomainFromURL(url string) string { - // Remove protocol - domain := strings.TrimPrefix(url, "https://") - domain = strings.TrimPrefix(domain, "http://") - - // Remove port if present - if idx := strings.Index(domain, ":"); idx != -1 { - domain = domain[:idx] - } - - // Remove path if present - if idx := strings.Index(domain, "/"); idx != -1 { - domain = domain[:idx] - } - - return domain -} diff --git a/pkg/php/detect_test.go b/pkg/php/detect_test.go deleted file mode 100644 index 7cd2128..0000000 --- a/pkg/php/detect_test.go +++ /dev/null @@ -1,660 +0,0 @@ -package php - -import ( - "os" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestIsLaravelProject_Good(t *testing.T) { - t.Run("valid Laravel project with artisan and composer.json", func(t *testing.T) { - dir := t.TempDir() - - // Create artisan file - artisanPath := filepath.Join(dir, "artisan") - err := os.WriteFile(artisanPath, []byte("#!/usr/bin/env php\n"), 0755) - require.NoError(t, err) - - // Create composer.json with laravel/framework - composerJSON := `{ - "name": "test/laravel-project", - "require": { - "php": "^8.2", - "laravel/framework": "^11.0" - } - }` - composerPath := filepath.Join(dir, "composer.json") - err = os.WriteFile(composerPath, []byte(composerJSON), 0644) - require.NoError(t, err) - - assert.True(t, IsLaravelProject(dir)) - }) - - t.Run("Laravel in require-dev", func(t *testing.T) { - dir := t.TempDir() - - // Create artisan file - artisanPath := filepath.Join(dir, "artisan") - err := os.WriteFile(artisanPath, []byte("#!/usr/bin/env php\n"), 0755) - require.NoError(t, err) - - // Create composer.json with laravel/framework in require-dev - composerJSON := `{ - "name": "test/laravel-project", - "require-dev": { - "laravel/framework": "^11.0" - } - }` - composerPath := filepath.Join(dir, "composer.json") - err = os.WriteFile(composerPath, []byte(composerJSON), 0644) - require.NoError(t, err) - - 
assert.True(t, IsLaravelProject(dir)) - }) -} - -func TestIsLaravelProject_Bad(t *testing.T) { - t.Run("missing artisan file", func(t *testing.T) { - dir := t.TempDir() - - // Create composer.json but no artisan - composerJSON := `{ - "name": "test/laravel-project", - "require": { - "laravel/framework": "^11.0" - } - }` - composerPath := filepath.Join(dir, "composer.json") - err := os.WriteFile(composerPath, []byte(composerJSON), 0644) - require.NoError(t, err) - - assert.False(t, IsLaravelProject(dir)) - }) - - t.Run("missing composer.json", func(t *testing.T) { - dir := t.TempDir() - - // Create artisan but no composer.json - artisanPath := filepath.Join(dir, "artisan") - err := os.WriteFile(artisanPath, []byte("#!/usr/bin/env php\n"), 0755) - require.NoError(t, err) - - assert.False(t, IsLaravelProject(dir)) - }) - - t.Run("composer.json without Laravel", func(t *testing.T) { - dir := t.TempDir() - - // Create artisan file - artisanPath := filepath.Join(dir, "artisan") - err := os.WriteFile(artisanPath, []byte("#!/usr/bin/env php\n"), 0755) - require.NoError(t, err) - - // Create composer.json without laravel/framework - composerJSON := `{ - "name": "test/symfony-project", - "require": { - "symfony/framework-bundle": "^7.0" - } - }` - composerPath := filepath.Join(dir, "composer.json") - err = os.WriteFile(composerPath, []byte(composerJSON), 0644) - require.NoError(t, err) - - assert.False(t, IsLaravelProject(dir)) - }) - - t.Run("invalid composer.json", func(t *testing.T) { - dir := t.TempDir() - - // Create artisan file - artisanPath := filepath.Join(dir, "artisan") - err := os.WriteFile(artisanPath, []byte("#!/usr/bin/env php\n"), 0755) - require.NoError(t, err) - - // Create invalid composer.json - composerPath := filepath.Join(dir, "composer.json") - err = os.WriteFile(composerPath, []byte("not valid json{"), 0644) - require.NoError(t, err) - - assert.False(t, IsLaravelProject(dir)) - }) - - t.Run("empty directory", func(t *testing.T) { - dir := t.TempDir() 
- assert.False(t, IsLaravelProject(dir)) - }) - - t.Run("non-existent directory", func(t *testing.T) { - assert.False(t, IsLaravelProject("/non/existent/path")) - }) -} - -func TestIsFrankenPHPProject_Good(t *testing.T) { - t.Run("project with octane and frankenphp config", func(t *testing.T) { - dir := t.TempDir() - - // Create composer.json with laravel/octane - composerJSON := `{ - "require": { - "laravel/octane": "^2.0" - } - }` - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - // Create config directory and octane.php - configDir := filepath.Join(dir, "config") - err = os.MkdirAll(configDir, 0755) - require.NoError(t, err) - - octaneConfig := ` 'frankenphp', -];` - err = os.WriteFile(filepath.Join(configDir, "octane.php"), []byte(octaneConfig), 0644) - require.NoError(t, err) - - assert.True(t, IsFrankenPHPProject(dir)) - }) - - t.Run("project with octane but no config file", func(t *testing.T) { - dir := t.TempDir() - - // Create composer.json with laravel/octane - composerJSON := `{ - "require": { - "laravel/octane": "^2.0" - } - }` - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - // No config file - should still return true (assume frankenphp) - assert.True(t, IsFrankenPHPProject(dir)) - }) - - t.Run("project with octane but unreadable config file", func(t *testing.T) { - dir := t.TempDir() - - // Create composer.json with laravel/octane - composerJSON := `{ - "require": { - "laravel/octane": "^2.0" - } - }` - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - // Create config directory and octane.php with no read permissions - configDir := filepath.Join(dir, "config") - err = os.MkdirAll(configDir, 0755) - require.NoError(t, err) - - octanePath := filepath.Join(configDir, "octane.php") - err = os.WriteFile(octanePath, []byte(" 'swoole', -];` - err = 
os.WriteFile(filepath.Join(configDir, "octane.php"), []byte(octaneConfig), 0644) - require.NoError(t, err) - - assert.False(t, IsFrankenPHPProject(dir)) - }) -} diff --git a/pkg/php/dockerfile.go b/pkg/php/dockerfile.go deleted file mode 100644 index 43a3b6c..0000000 --- a/pkg/php/dockerfile.go +++ /dev/null @@ -1,397 +0,0 @@ -package php - -import ( - "encoding/json" - "os" - "path/filepath" - "sort" - "strings" - - "github.com/host-uk/core/pkg/cli" -) - -// DockerfileConfig holds configuration for generating a Dockerfile. -type DockerfileConfig struct { - // PHPVersion is the PHP version to use (default: "8.3"). - PHPVersion string - - // BaseImage is the base Docker image (default: "dunglas/frankenphp"). - BaseImage string - - // PHPExtensions is the list of PHP extensions to install. - PHPExtensions []string - - // HasAssets indicates if the project has frontend assets to build. - HasAssets bool - - // PackageManager is the Node.js package manager (npm, pnpm, yarn, bun). - PackageManager string - - // IsLaravel indicates if this is a Laravel project. - IsLaravel bool - - // HasOctane indicates if Laravel Octane is installed. - HasOctane bool - - // UseAlpine uses the Alpine-based image (smaller). - UseAlpine bool -} - -// GenerateDockerfile generates a Dockerfile for a PHP/Laravel project. -// It auto-detects dependencies from composer.json and project structure. -func GenerateDockerfile(dir string) (string, error) { - config, err := DetectDockerfileConfig(dir) - if err != nil { - return "", err - } - - return GenerateDockerfileFromConfig(config), nil -} - -// DetectDockerfileConfig detects configuration from project files. 
-func DetectDockerfileConfig(dir string) (*DockerfileConfig, error) { - config := &DockerfileConfig{ - PHPVersion: "8.3", - BaseImage: "dunglas/frankenphp", - UseAlpine: true, - } - - // Read composer.json - composerPath := filepath.Join(dir, "composer.json") - composerData, err := os.ReadFile(composerPath) - if err != nil { - return nil, cli.WrapVerb(err, "read", "composer.json") - } - - var composer ComposerJSON - if err := json.Unmarshal(composerData, &composer); err != nil { - return nil, cli.WrapVerb(err, "parse", "composer.json") - } - - // Detect PHP version from composer.json - if phpVersion, ok := composer.Require["php"]; ok { - config.PHPVersion = extractPHPVersion(phpVersion) - } - - // Detect if Laravel - if _, ok := composer.Require["laravel/framework"]; ok { - config.IsLaravel = true - } - - // Detect if Octane - if _, ok := composer.Require["laravel/octane"]; ok { - config.HasOctane = true - } - - // Detect required PHP extensions - config.PHPExtensions = detectPHPExtensions(composer) - - // Detect frontend assets - config.HasAssets = hasNodeAssets(dir) - if config.HasAssets { - config.PackageManager = DetectPackageManager(dir) - } - - return config, nil -} - -// GenerateDockerfileFromConfig generates a Dockerfile from the given configuration. 
-func GenerateDockerfileFromConfig(config *DockerfileConfig) string { - var sb strings.Builder - - // Base image - baseTag := cli.Sprintf("latest-php%s", config.PHPVersion) - if config.UseAlpine { - baseTag += "-alpine" - } - - sb.WriteString("# Auto-generated Dockerfile for FrankenPHP\n") - sb.WriteString("# Generated by Core Framework\n\n") - - // Multi-stage build for smaller images - if config.HasAssets { - // Frontend build stage - sb.WriteString("# Stage 1: Build frontend assets\n") - sb.WriteString("FROM node:20-alpine AS frontend\n\n") - sb.WriteString("WORKDIR /app\n\n") - - // Copy package files based on package manager - switch config.PackageManager { - case "pnpm": - sb.WriteString("RUN corepack enable && corepack prepare pnpm@latest --activate\n\n") - sb.WriteString("COPY package.json pnpm-lock.yaml ./\n") - sb.WriteString("RUN pnpm install --frozen-lockfile\n\n") - case "yarn": - sb.WriteString("COPY package.json yarn.lock ./\n") - sb.WriteString("RUN yarn install --frozen-lockfile\n\n") - case "bun": - sb.WriteString("RUN npm install -g bun\n\n") - sb.WriteString("COPY package.json bun.lockb ./\n") - sb.WriteString("RUN bun install --frozen-lockfile\n\n") - default: // npm - sb.WriteString("COPY package.json package-lock.json ./\n") - sb.WriteString("RUN npm ci\n\n") - } - - sb.WriteString("COPY . 
.\n\n") - - // Build command - switch config.PackageManager { - case "pnpm": - sb.WriteString("RUN pnpm run build\n\n") - case "yarn": - sb.WriteString("RUN yarn build\n\n") - case "bun": - sb.WriteString("RUN bun run build\n\n") - default: - sb.WriteString("RUN npm run build\n\n") - } - } - - // PHP build stage - stageNum := 2 - if config.HasAssets { - sb.WriteString(cli.Sprintf("# Stage %d: PHP application\n", stageNum)) - } - sb.WriteString(cli.Sprintf("FROM %s:%s AS app\n\n", config.BaseImage, baseTag)) - - sb.WriteString("WORKDIR /app\n\n") - - // Install PHP extensions if needed - if len(config.PHPExtensions) > 0 { - sb.WriteString("# Install PHP extensions\n") - sb.WriteString(cli.Sprintf("RUN install-php-extensions %s\n\n", strings.Join(config.PHPExtensions, " "))) - } - - // Copy composer files first for better caching - sb.WriteString("# Copy composer files\n") - sb.WriteString("COPY composer.json composer.lock ./\n\n") - - // Install composer dependencies - sb.WriteString("# Install PHP dependencies\n") - sb.WriteString("RUN composer install --no-dev --no-scripts --optimize-autoloader --no-interaction\n\n") - - // Copy application code - sb.WriteString("# Copy application code\n") - sb.WriteString("COPY . 
.\n\n") - - // Run post-install scripts - sb.WriteString("# Run composer scripts\n") - sb.WriteString("RUN composer dump-autoload --optimize\n\n") - - // Copy frontend assets if built - if config.HasAssets { - sb.WriteString("# Copy built frontend assets\n") - sb.WriteString("COPY --from=frontend /app/public/build public/build\n\n") - } - - // Laravel-specific setup - if config.IsLaravel { - sb.WriteString("# Laravel setup\n") - sb.WriteString("RUN php artisan config:cache \\\n") - sb.WriteString(" && php artisan route:cache \\\n") - sb.WriteString(" && php artisan view:cache\n\n") - - // Set permissions - sb.WriteString("# Set permissions for Laravel\n") - sb.WriteString("RUN chown -R www-data:www-data storage bootstrap/cache \\\n") - sb.WriteString(" && chmod -R 775 storage bootstrap/cache\n\n") - } - - // Expose ports - sb.WriteString("# Expose ports\n") - sb.WriteString("EXPOSE 80 443\n\n") - - // Health check - sb.WriteString("# Health check\n") - sb.WriteString("HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \\\n") - sb.WriteString(" CMD curl -f http://localhost/up || exit 1\n\n") - - // Start command - sb.WriteString("# Start FrankenPHP\n") - if config.HasOctane { - sb.WriteString("CMD [\"php\", \"artisan\", \"octane:start\", \"--server=frankenphp\", \"--host=0.0.0.0\", \"--port=80\"]\n") - } else { - sb.WriteString("CMD [\"frankenphp\", \"run\", \"--config\", \"/etc/caddy/Caddyfile\"]\n") - } - - return sb.String() -} - -// ComposerJSON represents the structure of composer.json. -type ComposerJSON struct { - Name string `json:"name"` - Require map[string]string `json:"require"` - RequireDev map[string]string `json:"require-dev"` -} - -// detectPHPExtensions detects required PHP extensions from composer.json. 
-func detectPHPExtensions(composer ComposerJSON) []string { - extensionMap := make(map[string]bool) - - // Check for common packages and their required extensions - packageExtensions := map[string][]string{ - // Database - "doctrine/dbal": {"pdo_mysql", "pdo_pgsql"}, - "illuminate/database": {"pdo_mysql"}, - "laravel/framework": {"pdo_mysql", "bcmath", "ctype", "fileinfo", "mbstring", "openssl", "tokenizer", "xml"}, - "mongodb/mongodb": {"mongodb"}, - "predis/predis": {"redis"}, - "phpredis/phpredis": {"redis"}, - "laravel/horizon": {"redis", "pcntl"}, - "aws/aws-sdk-php": {"curl"}, - "intervention/image": {"gd"}, - "intervention/image-laravel": {"gd"}, - "spatie/image": {"gd"}, - "league/flysystem-aws-s3-v3": {"curl"}, - "guzzlehttp/guzzle": {"curl"}, - "nelmio/cors-bundle": {}, - // Queues - "laravel/reverb": {"pcntl"}, - "php-amqplib/php-amqplib": {"sockets"}, - // Misc - "moneyphp/money": {"bcmath", "intl"}, - "symfony/intl": {"intl"}, - "nesbot/carbon": {"intl"}, - "spatie/laravel-medialibrary": {"exif", "gd"}, - } - - // Check all require and require-dev dependencies - allDeps := make(map[string]string) - for pkg, ver := range composer.Require { - allDeps[pkg] = ver - } - for pkg, ver := range composer.RequireDev { - allDeps[pkg] = ver - } - - // Find required extensions - for pkg := range allDeps { - if exts, ok := packageExtensions[pkg]; ok { - for _, ext := range exts { - extensionMap[ext] = true - } - } - - // Check for direct ext- requirements - if strings.HasPrefix(pkg, "ext-") { - ext := strings.TrimPrefix(pkg, "ext-") - // Skip extensions that are built into PHP - builtIn := map[string]bool{ - "json": true, "ctype": true, "iconv": true, - "session": true, "simplexml": true, "pdo": true, - "xml": true, "tokenizer": true, - } - if !builtIn[ext] { - extensionMap[ext] = true - } - } - } - - // Convert to sorted slice - extensions := make([]string, 0, len(extensionMap)) - for ext := range extensionMap { - extensions = append(extensions, ext) - } - 
sort.Strings(extensions) - - return extensions -} - -// extractPHPVersion extracts a clean PHP version from a composer constraint. -func extractPHPVersion(constraint string) string { - // Handle common formats: ^8.2, >=8.2, 8.2.*, ~8.2 - constraint = strings.TrimLeft(constraint, "^>=~") - constraint = strings.TrimRight(constraint, ".*") - - // Extract major.minor - parts := strings.Split(constraint, ".") - if len(parts) >= 2 { - return parts[0] + "." + parts[1] - } - if len(parts) == 1 { - return parts[0] + ".0" - } - - return "8.3" // default -} - -// hasNodeAssets checks if the project has frontend assets. -func hasNodeAssets(dir string) bool { - packageJSON := filepath.Join(dir, "package.json") - if _, err := os.Stat(packageJSON); err != nil { - return false - } - - // Check for build script in package.json - data, err := os.ReadFile(packageJSON) - if err != nil { - return false - } - - var pkg struct { - Scripts map[string]string `json:"scripts"` - } - - if err := json.Unmarshal(data, &pkg); err != nil { - return false - } - - // Check if there's a build script - _, hasBuild := pkg.Scripts["build"] - return hasBuild -} - -// GenerateDockerignore generates a .dockerignore file content for PHP projects. 
-func GenerateDockerignore(dir string) string { - var sb strings.Builder - - sb.WriteString("# Git\n") - sb.WriteString(".git\n") - sb.WriteString(".gitignore\n") - sb.WriteString(".gitattributes\n\n") - - sb.WriteString("# Node\n") - sb.WriteString("node_modules\n\n") - - sb.WriteString("# Development\n") - sb.WriteString(".env\n") - sb.WriteString(".env.local\n") - sb.WriteString(".env.*.local\n") - sb.WriteString("*.log\n") - sb.WriteString(".phpunit.result.cache\n") - sb.WriteString("phpunit.xml\n") - sb.WriteString(".php-cs-fixer.cache\n") - sb.WriteString("phpstan.neon\n\n") - - sb.WriteString("# IDE\n") - sb.WriteString(".idea\n") - sb.WriteString(".vscode\n") - sb.WriteString("*.swp\n") - sb.WriteString("*.swo\n\n") - - sb.WriteString("# Laravel specific\n") - sb.WriteString("storage/app/*\n") - sb.WriteString("storage/logs/*\n") - sb.WriteString("storage/framework/cache/*\n") - sb.WriteString("storage/framework/sessions/*\n") - sb.WriteString("storage/framework/views/*\n") - sb.WriteString("bootstrap/cache/*\n\n") - - sb.WriteString("# Build artifacts\n") - sb.WriteString("public/hot\n") - sb.WriteString("public/storage\n") - sb.WriteString("vendor\n\n") - - sb.WriteString("# Docker\n") - sb.WriteString("Dockerfile*\n") - sb.WriteString("docker-compose*.yml\n") - sb.WriteString(".dockerignore\n\n") - - sb.WriteString("# Documentation\n") - sb.WriteString("README.md\n") - sb.WriteString("CHANGELOG.md\n") - sb.WriteString("docs\n") - - return sb.String() -} diff --git a/pkg/php/dockerfile_test.go b/pkg/php/dockerfile_test.go deleted file mode 100644 index 5c3b1ce..0000000 --- a/pkg/php/dockerfile_test.go +++ /dev/null @@ -1,634 +0,0 @@ -package php - -import ( - "os" - "path/filepath" - "strings" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestGenerateDockerfile_Good(t *testing.T) { - t.Run("basic Laravel project", func(t *testing.T) { - dir := t.TempDir() - - // Create composer.json - composerJSON 
:= `{ - "name": "test/laravel-project", - "require": { - "php": "^8.2", - "laravel/framework": "^11.0" - } - }` - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - // Create composer.lock - err = os.WriteFile(filepath.Join(dir, "composer.lock"), []byte("{}"), 0644) - require.NoError(t, err) - - content, err := GenerateDockerfile(dir) - require.NoError(t, err) - - // Check content - assert.Contains(t, content, "FROM dunglas/frankenphp") - assert.Contains(t, content, "php8.2") - assert.Contains(t, content, "COPY composer.json composer.lock") - assert.Contains(t, content, "composer install") - assert.Contains(t, content, "EXPOSE 80 443") - }) - - t.Run("Laravel project with Octane", func(t *testing.T) { - dir := t.TempDir() - - composerJSON := `{ - "name": "test/laravel-octane", - "require": { - "php": "^8.3", - "laravel/framework": "^11.0", - "laravel/octane": "^2.0" - } - }` - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - err = os.WriteFile(filepath.Join(dir, "composer.lock"), []byte("{}"), 0644) - require.NoError(t, err) - - content, err := GenerateDockerfile(dir) - require.NoError(t, err) - - assert.Contains(t, content, "php8.3") - assert.Contains(t, content, "octane:start") - }) - - t.Run("project with frontend assets", func(t *testing.T) { - dir := t.TempDir() - - composerJSON := `{ - "name": "test/laravel-vite", - "require": { - "php": "^8.3", - "laravel/framework": "^11.0" - } - }` - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - err = os.WriteFile(filepath.Join(dir, "composer.lock"), []byte("{}"), 0644) - require.NoError(t, err) - - packageJSON := `{ - "name": "test-app", - "scripts": { - "dev": "vite", - "build": "vite build" - } - }` - err = os.WriteFile(filepath.Join(dir, "package.json"), []byte(packageJSON), 0644) - require.NoError(t, err) - err = 
os.WriteFile(filepath.Join(dir, "package-lock.json"), []byte("{}"), 0644) - require.NoError(t, err) - - content, err := GenerateDockerfile(dir) - require.NoError(t, err) - - // Should have multi-stage build - assert.Contains(t, content, "FROM node:20-alpine AS frontend") - assert.Contains(t, content, "npm ci") - assert.Contains(t, content, "npm run build") - assert.Contains(t, content, "COPY --from=frontend") - }) - - t.Run("project with pnpm", func(t *testing.T) { - dir := t.TempDir() - - composerJSON := `{ - "name": "test/laravel-pnpm", - "require": { - "php": "^8.3", - "laravel/framework": "^11.0" - } - }` - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - err = os.WriteFile(filepath.Join(dir, "composer.lock"), []byte("{}"), 0644) - require.NoError(t, err) - - packageJSON := `{ - "name": "test-app", - "scripts": { - "build": "vite build" - } - }` - err = os.WriteFile(filepath.Join(dir, "package.json"), []byte(packageJSON), 0644) - require.NoError(t, err) - - // Create pnpm-lock.yaml - err = os.WriteFile(filepath.Join(dir, "pnpm-lock.yaml"), []byte("lockfileVersion: 6.0"), 0644) - require.NoError(t, err) - - content, err := GenerateDockerfile(dir) - require.NoError(t, err) - - assert.Contains(t, content, "pnpm install") - assert.Contains(t, content, "pnpm run build") - }) - - t.Run("project with Redis dependency", func(t *testing.T) { - dir := t.TempDir() - - composerJSON := `{ - "name": "test/laravel-redis", - "require": { - "php": "^8.3", - "laravel/framework": "^11.0", - "predis/predis": "^2.0" - } - }` - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - err = os.WriteFile(filepath.Join(dir, "composer.lock"), []byte("{}"), 0644) - require.NoError(t, err) - - content, err := GenerateDockerfile(dir) - require.NoError(t, err) - - assert.Contains(t, content, "install-php-extensions") - assert.Contains(t, content, "redis") - }) - - 
t.Run("project with explicit ext- requirements", func(t *testing.T) { - dir := t.TempDir() - - composerJSON := `{ - "name": "test/with-extensions", - "require": { - "php": "^8.3", - "ext-gd": "*", - "ext-imagick": "*", - "ext-intl": "*" - } - }` - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - err = os.WriteFile(filepath.Join(dir, "composer.lock"), []byte("{}"), 0644) - require.NoError(t, err) - - content, err := GenerateDockerfile(dir) - require.NoError(t, err) - - assert.Contains(t, content, "install-php-extensions") - assert.Contains(t, content, "gd") - assert.Contains(t, content, "imagick") - assert.Contains(t, content, "intl") - }) -} - -func TestGenerateDockerfile_Bad(t *testing.T) { - t.Run("missing composer.json", func(t *testing.T) { - dir := t.TempDir() - - _, err := GenerateDockerfile(dir) - assert.Error(t, err) - assert.Contains(t, err.Error(), "composer.json") - }) - - t.Run("invalid composer.json", func(t *testing.T) { - dir := t.TempDir() - - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte("not json{"), 0644) - require.NoError(t, err) - - _, err = GenerateDockerfile(dir) - assert.Error(t, err) - }) -} - -func TestDetectDockerfileConfig_Good(t *testing.T) { - t.Run("full Laravel project", func(t *testing.T) { - dir := t.TempDir() - - composerJSON := `{ - "name": "test/full-laravel", - "require": { - "php": "^8.3", - "laravel/framework": "^11.0", - "laravel/octane": "^2.0", - "predis/predis": "^2.0", - "intervention/image": "^3.0" - } - }` - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - packageJSON := `{"scripts": {"build": "vite build"}}` - err = os.WriteFile(filepath.Join(dir, "package.json"), []byte(packageJSON), 0644) - require.NoError(t, err) - err = os.WriteFile(filepath.Join(dir, "yarn.lock"), []byte(""), 0644) - require.NoError(t, err) - - config, err := DetectDockerfileConfig(dir) - 
require.NoError(t, err) - - assert.Equal(t, "8.3", config.PHPVersion) - assert.True(t, config.IsLaravel) - assert.True(t, config.HasOctane) - assert.True(t, config.HasAssets) - assert.Equal(t, "yarn", config.PackageManager) - assert.Contains(t, config.PHPExtensions, "redis") - assert.Contains(t, config.PHPExtensions, "gd") - }) -} - -func TestDetectDockerfileConfig_Bad(t *testing.T) { - t.Run("non-existent directory", func(t *testing.T) { - _, err := DetectDockerfileConfig("/non/existent/path") - assert.Error(t, err) - }) -} - -func TestExtractPHPVersion_Good(t *testing.T) { - tests := []struct { - constraint string - expected string - }{ - {"^8.2", "8.2"}, - {"^8.3", "8.3"}, - {">=8.2", "8.2"}, - {"~8.2", "8.2"}, - {"8.2.*", "8.2"}, - {"8.2.0", "8.2"}, - {"8", "8.0"}, - } - - for _, tt := range tests { - t.Run(tt.constraint, func(t *testing.T) { - result := extractPHPVersion(tt.constraint) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestDetectPHPExtensions_Good(t *testing.T) { - t.Run("detects Redis from predis", func(t *testing.T) { - composer := ComposerJSON{ - Require: map[string]string{ - "predis/predis": "^2.0", - }, - } - - extensions := detectPHPExtensions(composer) - assert.Contains(t, extensions, "redis") - }) - - t.Run("detects GD from intervention/image", func(t *testing.T) { - composer := ComposerJSON{ - Require: map[string]string{ - "intervention/image": "^3.0", - }, - } - - extensions := detectPHPExtensions(composer) - assert.Contains(t, extensions, "gd") - }) - - t.Run("detects multiple extensions from Laravel", func(t *testing.T) { - composer := ComposerJSON{ - Require: map[string]string{ - "laravel/framework": "^11.0", - }, - } - - extensions := detectPHPExtensions(composer) - assert.Contains(t, extensions, "pdo_mysql") - assert.Contains(t, extensions, "bcmath") - }) - - t.Run("detects explicit ext- requirements", func(t *testing.T) { - composer := ComposerJSON{ - Require: map[string]string{ - "ext-gd": "*", - "ext-imagick": "*", - 
}, - } - - extensions := detectPHPExtensions(composer) - assert.Contains(t, extensions, "gd") - assert.Contains(t, extensions, "imagick") - }) - - t.Run("skips built-in extensions", func(t *testing.T) { - composer := ComposerJSON{ - Require: map[string]string{ - "ext-json": "*", - "ext-session": "*", - "ext-pdo": "*", - }, - } - - extensions := detectPHPExtensions(composer) - assert.NotContains(t, extensions, "json") - assert.NotContains(t, extensions, "session") - assert.NotContains(t, extensions, "pdo") - }) - - t.Run("sorts extensions alphabetically", func(t *testing.T) { - composer := ComposerJSON{ - Require: map[string]string{ - "ext-zip": "*", - "ext-gd": "*", - "ext-intl": "*", - }, - } - - extensions := detectPHPExtensions(composer) - - // Check they are sorted - for i := 1; i < len(extensions); i++ { - assert.True(t, extensions[i-1] < extensions[i], - "extensions should be sorted: %v", extensions) - } - }) -} - -func TestHasNodeAssets_Good(t *testing.T) { - t.Run("with build script", func(t *testing.T) { - dir := t.TempDir() - - packageJSON := `{ - "name": "test", - "scripts": { - "dev": "vite", - "build": "vite build" - } - }` - err := os.WriteFile(filepath.Join(dir, "package.json"), []byte(packageJSON), 0644) - require.NoError(t, err) - - assert.True(t, hasNodeAssets(dir)) - }) -} - -func TestHasNodeAssets_Bad(t *testing.T) { - t.Run("no package.json", func(t *testing.T) { - dir := t.TempDir() - assert.False(t, hasNodeAssets(dir)) - }) - - t.Run("no build script", func(t *testing.T) { - dir := t.TempDir() - - packageJSON := `{ - "name": "test", - "scripts": { - "dev": "vite" - } - }` - err := os.WriteFile(filepath.Join(dir, "package.json"), []byte(packageJSON), 0644) - require.NoError(t, err) - - assert.False(t, hasNodeAssets(dir)) - }) - - t.Run("invalid package.json", func(t *testing.T) { - dir := t.TempDir() - - err := os.WriteFile(filepath.Join(dir, "package.json"), []byte("invalid{"), 0644) - require.NoError(t, err) - - assert.False(t, 
hasNodeAssets(dir)) - }) -} - -func TestGenerateDockerignore_Good(t *testing.T) { - t.Run("generates complete dockerignore", func(t *testing.T) { - dir := t.TempDir() - content := GenerateDockerignore(dir) - - // Check key entries - assert.Contains(t, content, ".git") - assert.Contains(t, content, "node_modules") - assert.Contains(t, content, ".env") - assert.Contains(t, content, "vendor") - assert.Contains(t, content, "storage/logs/*") - assert.Contains(t, content, ".idea") - assert.Contains(t, content, ".vscode") - }) -} - -func TestGenerateDockerfileFromConfig_Good(t *testing.T) { - t.Run("minimal config", func(t *testing.T) { - config := &DockerfileConfig{ - PHPVersion: "8.3", - BaseImage: "dunglas/frankenphp", - UseAlpine: true, - } - - content := GenerateDockerfileFromConfig(config) - - assert.Contains(t, content, "FROM dunglas/frankenphp:latest-php8.3-alpine") - assert.Contains(t, content, "WORKDIR /app") - assert.Contains(t, content, "COPY composer.json composer.lock") - assert.Contains(t, content, "EXPOSE 80 443") - }) - - t.Run("with extensions", func(t *testing.T) { - config := &DockerfileConfig{ - PHPVersion: "8.3", - BaseImage: "dunglas/frankenphp", - UseAlpine: true, - PHPExtensions: []string{"redis", "gd", "intl"}, - } - - content := GenerateDockerfileFromConfig(config) - - assert.Contains(t, content, "install-php-extensions redis gd intl") - }) - - t.Run("Laravel with Octane", func(t *testing.T) { - config := &DockerfileConfig{ - PHPVersion: "8.3", - BaseImage: "dunglas/frankenphp", - UseAlpine: true, - IsLaravel: true, - HasOctane: true, - } - - content := GenerateDockerfileFromConfig(config) - - assert.Contains(t, content, "php artisan config:cache") - assert.Contains(t, content, "php artisan route:cache") - assert.Contains(t, content, "php artisan view:cache") - assert.Contains(t, content, "chown -R www-data:www-data storage") - assert.Contains(t, content, "octane:start") - }) - - t.Run("with frontend assets", func(t *testing.T) { - config := 
&DockerfileConfig{ - PHPVersion: "8.3", - BaseImage: "dunglas/frankenphp", - UseAlpine: true, - HasAssets: true, - PackageManager: "npm", - } - - content := GenerateDockerfileFromConfig(config) - - // Multi-stage build - assert.Contains(t, content, "FROM node:20-alpine AS frontend") - assert.Contains(t, content, "COPY package.json package-lock.json") - assert.Contains(t, content, "RUN npm ci") - assert.Contains(t, content, "RUN npm run build") - assert.Contains(t, content, "COPY --from=frontend /app/public/build public/build") - }) - - t.Run("with yarn", func(t *testing.T) { - config := &DockerfileConfig{ - PHPVersion: "8.3", - BaseImage: "dunglas/frankenphp", - UseAlpine: true, - HasAssets: true, - PackageManager: "yarn", - } - - content := GenerateDockerfileFromConfig(config) - - assert.Contains(t, content, "COPY package.json yarn.lock") - assert.Contains(t, content, "yarn install --frozen-lockfile") - assert.Contains(t, content, "yarn build") - }) - - t.Run("with bun", func(t *testing.T) { - config := &DockerfileConfig{ - PHPVersion: "8.3", - BaseImage: "dunglas/frankenphp", - UseAlpine: true, - HasAssets: true, - PackageManager: "bun", - } - - content := GenerateDockerfileFromConfig(config) - - assert.Contains(t, content, "npm install -g bun") - assert.Contains(t, content, "COPY package.json bun.lockb") - assert.Contains(t, content, "bun install --frozen-lockfile") - assert.Contains(t, content, "bun run build") - }) - - t.Run("non-alpine image", func(t *testing.T) { - config := &DockerfileConfig{ - PHPVersion: "8.3", - BaseImage: "dunglas/frankenphp", - UseAlpine: false, - } - - content := GenerateDockerfileFromConfig(config) - - assert.Contains(t, content, "FROM dunglas/frankenphp:latest-php8.3 AS app") - assert.NotContains(t, content, "alpine") - }) -} - -func TestIsPHPProject_Good(t *testing.T) { - t.Run("project with composer.json", func(t *testing.T) { - dir := t.TempDir() - - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte("{}"), 0644) - 
require.NoError(t, err) - - assert.True(t, IsPHPProject(dir)) - }) -} - -func TestIsPHPProject_Bad(t *testing.T) { - t.Run("project without composer.json", func(t *testing.T) { - dir := t.TempDir() - assert.False(t, IsPHPProject(dir)) - }) - - t.Run("non-existent directory", func(t *testing.T) { - assert.False(t, IsPHPProject("/non/existent/path")) - }) -} - -func TestExtractPHPVersion_Edge(t *testing.T) { - t.Run("handles single major version", func(t *testing.T) { - result := extractPHPVersion("8") - assert.Equal(t, "8.0", result) - }) -} - -func TestDetectPHPExtensions_RequireDev(t *testing.T) { - t.Run("detects extensions from require-dev", func(t *testing.T) { - composer := ComposerJSON{ - RequireDev: map[string]string{ - "predis/predis": "^2.0", - }, - } - - extensions := detectPHPExtensions(composer) - assert.Contains(t, extensions, "redis") - }) -} - -func TestDockerfileStructure_Good(t *testing.T) { - t.Run("Dockerfile has proper structure", func(t *testing.T) { - dir := t.TempDir() - - composerJSON := `{ - "name": "test/app", - "require": { - "php": "^8.3", - "laravel/framework": "^11.0", - "laravel/octane": "^2.0", - "predis/predis": "^2.0" - } - }` - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - err = os.WriteFile(filepath.Join(dir, "composer.lock"), []byte("{}"), 0644) - require.NoError(t, err) - - packageJSON := `{"scripts": {"build": "vite build"}}` - err = os.WriteFile(filepath.Join(dir, "package.json"), []byte(packageJSON), 0644) - require.NoError(t, err) - err = os.WriteFile(filepath.Join(dir, "package-lock.json"), []byte("{}"), 0644) - require.NoError(t, err) - - content, err := GenerateDockerfile(dir) - require.NoError(t, err) - - lines := strings.Split(content, "\n") - var fromCount, workdirCount, copyCount, runCount, exposeCount, cmdCount int - - for _, line := range lines { - trimmed := strings.TrimSpace(line) - switch { - case strings.HasPrefix(trimmed, "FROM "): - 
fromCount++ - case strings.HasPrefix(trimmed, "WORKDIR "): - workdirCount++ - case strings.HasPrefix(trimmed, "COPY "): - copyCount++ - case strings.HasPrefix(trimmed, "RUN "): - runCount++ - case strings.HasPrefix(trimmed, "EXPOSE "): - exposeCount++ - case strings.HasPrefix(trimmed, "CMD ["): - // Only count actual CMD instructions, not HEALTHCHECK CMD - cmdCount++ - } - } - - // Multi-stage build should have 2 FROM statements - assert.Equal(t, 2, fromCount, "should have 2 FROM statements for multi-stage build") - - // Should have proper structure - assert.GreaterOrEqual(t, workdirCount, 1, "should have WORKDIR") - assert.GreaterOrEqual(t, copyCount, 3, "should have multiple COPY statements") - assert.GreaterOrEqual(t, runCount, 2, "should have multiple RUN statements") - assert.Equal(t, 1, exposeCount, "should have exactly one EXPOSE") - assert.Equal(t, 1, cmdCount, "should have exactly one CMD") - }) -} diff --git a/pkg/php/i18n.go b/pkg/php/i18n.go deleted file mode 100644 index f0f7194..0000000 --- a/pkg/php/i18n.go +++ /dev/null @@ -1,16 +0,0 @@ -// Package php provides PHP/Laravel development tools. 
-package php - -import ( - "embed" - - "github.com/host-uk/core/pkg/i18n" -) - -//go:embed locales/*.json -var localeFS embed.FS - -func init() { - // Register PHP translations with the i18n system - i18n.RegisterLocales(localeFS, "locales") -} diff --git a/pkg/php/locales/en_GB.json b/pkg/php/locales/en_GB.json deleted file mode 100644 index 4f74cd8..0000000 --- a/pkg/php/locales/en_GB.json +++ /dev/null @@ -1,147 +0,0 @@ -{ - "cmd": { - "php": { - "short": "Laravel/PHP development tools", - "long": "Laravel and PHP development tools including testing, formatting, static analysis, and deployment", - "label": { - "php": "PHP:", - "audit": "Audit:", - "psalm": "Psalm:", - "rector": "Rector:", - "security": "Security:", - "infection": "Infection:", - "info": "Info:", - "setup": "Setup:" - }, - "error": { - "not_php": "Not a PHP project (no composer.json found)", - "fmt_failed": "Formatting failed", - "fmt_issues": "Style issues found", - "analysis_issues": "Analysis errors found", - "audit_failed": "Audit failed", - "vulns_found": "Vulnerabilities found", - "psalm_not_installed": "Psalm not installed", - "psalm_issues": "Psalm found type errors", - "rector_not_installed": "Rector not installed", - "rector_failed": "Rector failed", - "infection_not_installed": "Infection not installed", - "infection_failed": "Mutation testing failed", - "security_failed": "Security check failed", - "critical_high_issues": "Critical or high severity issues found" - }, - "test": { - "short": "Run PHPUnit/Pest tests", - "long": "Run PHPUnit or Pest tests with optional filtering, parallel execution, and coverage", - "flag": { - "parallel": "Run tests in parallel", - "coverage": "Generate code coverage report", - "filter": "Filter tests by name", - "group": "Run only tests in this group" - } - }, - "fmt": { - "short": "Format PHP code with Laravel Pint", - "long": "Format PHP code using Laravel Pint code style fixer", - "no_formatter": "No code formatter found (install laravel/pint)", - 
"no_issues": "No style issues found", - "formatting": "Formatting with {{.Formatter}}...", - "flag": { - "fix": "Fix style issues (default: check only)" - } - }, - "analyse": { - "short": "Run PHPStan static analysis", - "long": "Run PHPStan/Larastan for static code analysis", - "no_analyser": "No static analyser found (install phpstan/phpstan or nunomaduro/larastan)", - "flag": { - "level": "Analysis level (0-9, default: from config)", - "memory": "Memory limit (e.g., 2G)" - } - }, - "audit": { - "short": "Security audit for dependencies", - "long": "Audit Composer and NPM dependencies for known vulnerabilities", - "scanning": "Scanning dependencies for vulnerabilities...", - "secure": "No vulnerabilities", - "error": "Audit error", - "vulnerabilities": "{{.Count}} vulnerabilities found", - "found_vulns": "Found {{.Count}} vulnerabilities", - "all_secure": "All dependencies secure", - "completed_errors": "Audit completed with errors", - "flag": { - "fix": "Attempt to fix vulnerabilities" - } - }, - "psalm": { - "short": "Run Psalm static analysis", - "long": "Run Psalm for deep static analysis and type checking", - "not_found": "Psalm not found", - "install": "composer require --dev vimeo/psalm", - "setup": "vendor/bin/psalm --init", - "analysing": "Analysing with Psalm...", - "analysing_fixing": "Analysing and fixing with Psalm...", - "flag": { - "level": "Analysis level (1-8)", - "baseline": "Generate or update baseline", - "show_info": "Show informational issues" - } - }, - "rector": { - "short": "Automated code refactoring", - "long": "Run Rector for automated code upgrades and refactoring", - "not_found": "Rector not found", - "install": "composer require --dev rector/rector", - "setup": "vendor/bin/rector init", - "analysing": "Analysing code for refactoring opportunities...", - "refactoring": "Refactoring code...", - "no_changes": "No refactoring changes needed", - "changes_suggested": "Rector suggests changes (run with --fix to apply)", - "flag": { - 
"fix": "Apply refactoring changes", - "diff": "Show diff of changes", - "clear_cache": "Clear Rector cache before running" - } - }, - "infection": { - "short": "Mutation testing for test quality", - "long": "Run Infection mutation testing to measure test suite quality", - "not_found": "Infection not found", - "install": "composer require --dev infection/infection", - "note": "This may take a while depending on test suite size", - "complete": "Mutation testing complete", - "flag": { - "min_msi": "Minimum Mutation Score Indicator (0-100)", - "min_covered_msi": "Minimum covered code MSI (0-100)", - "threads": "Number of parallel threads", - "filter": "Filter mutants by file path", - "only_covered": "Only mutate covered code" - } - }, - "security": { - "short": "Security vulnerability scanning", - "long": "Run comprehensive security checks on PHP codebase", - "checks_suffix": " CHECKS", - "summary": "Security scan complete", - "passed": "Passed:", - "critical": "Critical:", - "high": "High:", - "medium": "Medium:", - "low": "Low:", - "flag": { - "severity": "Minimum severity to report (low, medium, high, critical)", - "sarif": "Output in SARIF format", - "url": "Application URL for runtime checks" - } - }, - "qa": { - "short": "Run full QA pipeline", - "long": "Run comprehensive quality assurance: audit, format, analyse, test, and more", - "flag": { - "quick": "Run quick checks only (audit, fmt, stan)", - "full": "Run all stages including slow checks", - "fix": "Auto-fix issues where possible" - } - } - } - } -} diff --git a/pkg/php/packages.go b/pkg/php/packages.go deleted file mode 100644 index ba3501f..0000000 --- a/pkg/php/packages.go +++ /dev/null @@ -1,305 +0,0 @@ -package php - -import ( - "encoding/json" - "os" - "os/exec" - "path/filepath" - - "github.com/host-uk/core/pkg/cli" -) - -// LinkedPackage represents a linked local package. 
-type LinkedPackage struct { - Name string `json:"name"` - Path string `json:"path"` - Version string `json:"version"` -} - -// composerRepository represents a composer repository entry. -type composerRepository struct { - Type string `json:"type"` - URL string `json:"url,omitempty"` - Options map[string]any `json:"options,omitempty"` -} - -// readComposerJSON reads and parses composer.json from the given directory. -func readComposerJSON(dir string) (map[string]json.RawMessage, error) { - composerPath := filepath.Join(dir, "composer.json") - data, err := os.ReadFile(composerPath) - if err != nil { - return nil, cli.WrapVerb(err, "read", "composer.json") - } - - var raw map[string]json.RawMessage - if err := json.Unmarshal(data, &raw); err != nil { - return nil, cli.WrapVerb(err, "parse", "composer.json") - } - - return raw, nil -} - -// writeComposerJSON writes the composer.json to the given directory. -func writeComposerJSON(dir string, raw map[string]json.RawMessage) error { - composerPath := filepath.Join(dir, "composer.json") - - data, err := json.MarshalIndent(raw, "", " ") - if err != nil { - return cli.WrapVerb(err, "marshal", "composer.json") - } - - // Add trailing newline - data = append(data, '\n') - - if err := os.WriteFile(composerPath, data, 0644); err != nil { - return cli.WrapVerb(err, "write", "composer.json") - } - - return nil -} - -// getRepositories extracts repositories from raw composer.json. -func getRepositories(raw map[string]json.RawMessage) ([]composerRepository, error) { - reposRaw, ok := raw["repositories"] - if !ok { - return []composerRepository{}, nil - } - - var repos []composerRepository - if err := json.Unmarshal(reposRaw, &repos); err != nil { - return nil, cli.WrapVerb(err, "parse", "repositories") - } - - return repos, nil -} - -// setRepositories sets repositories in raw composer.json. 
-func setRepositories(raw map[string]json.RawMessage, repos []composerRepository) error { - if len(repos) == 0 { - delete(raw, "repositories") - return nil - } - - reposData, err := json.Marshal(repos) - if err != nil { - return cli.WrapVerb(err, "marshal", "repositories") - } - - raw["repositories"] = reposData - return nil -} - -// getPackageInfo reads package name and version from a composer.json in the given path. -func getPackageInfo(packagePath string) (name, version string, err error) { - composerPath := filepath.Join(packagePath, "composer.json") - data, err := os.ReadFile(composerPath) - if err != nil { - return "", "", cli.WrapVerb(err, "read", "package composer.json") - } - - var pkg struct { - Name string `json:"name"` - Version string `json:"version"` - } - - if err := json.Unmarshal(data, &pkg); err != nil { - return "", "", cli.WrapVerb(err, "parse", "package composer.json") - } - - if pkg.Name == "" { - return "", "", cli.Err("package name not found in composer.json") - } - - return pkg.Name, pkg.Version, nil -} - -// LinkPackages adds path repositories to composer.json for local package development. 
-func LinkPackages(dir string, packages []string) error { - if !IsPHPProject(dir) { - return cli.Err("not a PHP project (missing composer.json)") - } - - raw, err := readComposerJSON(dir) - if err != nil { - return err - } - - repos, err := getRepositories(raw) - if err != nil { - return err - } - - for _, packagePath := range packages { - // Resolve absolute path - absPath, err := filepath.Abs(packagePath) - if err != nil { - return cli.Err("failed to resolve path %s: %w", packagePath, err) - } - - // Verify the path exists and has a composer.json - if !IsPHPProject(absPath) { - return cli.Err("not a PHP package (missing composer.json): %s", absPath) - } - - // Get package name for validation - pkgName, _, err := getPackageInfo(absPath) - if err != nil { - return cli.Err("failed to get package info from %s: %w", absPath, err) - } - - // Check if already linked - alreadyLinked := false - for _, repo := range repos { - if repo.Type == "path" && repo.URL == absPath { - alreadyLinked = true - break - } - } - - if alreadyLinked { - continue - } - - // Add path repository - repos = append(repos, composerRepository{ - Type: "path", - URL: absPath, - Options: map[string]any{ - "symlink": true, - }, - }) - - cli.Print("Linked: %s -> %s\n", pkgName, absPath) - } - - if err := setRepositories(raw, repos); err != nil { - return err - } - - return writeComposerJSON(dir, raw) -} - -// UnlinkPackages removes path repositories from composer.json. 
-func UnlinkPackages(dir string, packages []string) error { - if !IsPHPProject(dir) { - return cli.Err("not a PHP project (missing composer.json)") - } - - raw, err := readComposerJSON(dir) - if err != nil { - return err - } - - repos, err := getRepositories(raw) - if err != nil { - return err - } - - // Build set of packages to unlink - toUnlink := make(map[string]bool) - for _, pkg := range packages { - toUnlink[pkg] = true - } - - // Filter out unlinked packages - filtered := make([]composerRepository, 0, len(repos)) - for _, repo := range repos { - if repo.Type != "path" { - filtered = append(filtered, repo) - continue - } - - // Check if this repo should be unlinked - shouldUnlink := false - - // Try to get package name from the path - if IsPHPProject(repo.URL) { - pkgName, _, err := getPackageInfo(repo.URL) - if err == nil && toUnlink[pkgName] { - shouldUnlink = true - cli.Print("Unlinked: %s\n", pkgName) - } - } - - // Also check if path matches any of the provided names - for pkg := range toUnlink { - if repo.URL == pkg || filepath.Base(repo.URL) == pkg { - shouldUnlink = true - cli.Print("Unlinked: %s\n", repo.URL) - break - } - } - - if !shouldUnlink { - filtered = append(filtered, repo) - } - } - - if err := setRepositories(raw, filtered); err != nil { - return err - } - - return writeComposerJSON(dir, raw) -} - -// UpdatePackages runs composer update for specific packages. -func UpdatePackages(dir string, packages []string) error { - if !IsPHPProject(dir) { - return cli.Err("not a PHP project (missing composer.json)") - } - - args := []string{"update"} - args = append(args, packages...) - - cmd := exec.Command("composer", args...) - cmd.Dir = dir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - return cmd.Run() -} - -// ListLinkedPackages returns all path repositories from composer.json. 
-func ListLinkedPackages(dir string) ([]LinkedPackage, error) { - if !IsPHPProject(dir) { - return nil, cli.Err("not a PHP project (missing composer.json)") - } - - raw, err := readComposerJSON(dir) - if err != nil { - return nil, err - } - - repos, err := getRepositories(raw) - if err != nil { - return nil, err - } - - linked := make([]LinkedPackage, 0) - for _, repo := range repos { - if repo.Type != "path" { - continue - } - - pkg := LinkedPackage{ - Path: repo.URL, - } - - // Try to get package info - if IsPHPProject(repo.URL) { - name, version, err := getPackageInfo(repo.URL) - if err == nil { - pkg.Name = name - pkg.Version = version - } - } - - if pkg.Name == "" { - pkg.Name = filepath.Base(repo.URL) - } - - linked = append(linked, pkg) - } - - return linked, nil -} diff --git a/pkg/php/packages_test.go b/pkg/php/packages_test.go deleted file mode 100644 index 4c26b45..0000000 --- a/pkg/php/packages_test.go +++ /dev/null @@ -1,543 +0,0 @@ -package php - -import ( - "encoding/json" - "os" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestReadComposerJSON_Good(t *testing.T) { - t.Run("reads valid composer.json", func(t *testing.T) { - dir := t.TempDir() - composerJSON := `{ - "name": "test/project", - "require": { - "php": "^8.2" - } - }` - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - raw, err := readComposerJSON(dir) - assert.NoError(t, err) - assert.NotNil(t, raw) - assert.Contains(t, string(raw["name"]), "test/project") - }) - - t.Run("preserves all fields", func(t *testing.T) { - dir := t.TempDir() - composerJSON := `{ - "name": "test/project", - "description": "Test project", - "require": {"php": "^8.2"}, - "autoload": {"psr-4": {"App\\": "src/"}} - }` - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - raw, err := readComposerJSON(dir) - 
assert.NoError(t, err) - assert.Contains(t, string(raw["autoload"]), "psr-4") - }) -} - -func TestReadComposerJSON_Bad(t *testing.T) { - t.Run("missing composer.json", func(t *testing.T) { - dir := t.TempDir() - _, err := readComposerJSON(dir) - assert.Error(t, err) - assert.Contains(t, err.Error(), "Failed to read composer.json") - }) - - t.Run("invalid JSON", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte("not json{"), 0644) - require.NoError(t, err) - - _, err = readComposerJSON(dir) - assert.Error(t, err) - assert.Contains(t, err.Error(), "Failed to parse composer.json") - }) -} - -func TestWriteComposerJSON_Good(t *testing.T) { - t.Run("writes valid composer.json", func(t *testing.T) { - dir := t.TempDir() - raw := make(map[string]json.RawMessage) - raw["name"] = json.RawMessage(`"test/project"`) - - err := writeComposerJSON(dir, raw) - assert.NoError(t, err) - - // Verify file was written - content, err := os.ReadFile(filepath.Join(dir, "composer.json")) - assert.NoError(t, err) - assert.Contains(t, string(content), "test/project") - // Verify trailing newline - assert.True(t, content[len(content)-1] == '\n') - }) - - t.Run("pretty prints with indentation", func(t *testing.T) { - dir := t.TempDir() - raw := make(map[string]json.RawMessage) - raw["name"] = json.RawMessage(`"test/project"`) - raw["require"] = json.RawMessage(`{"php":"^8.2"}`) - - err := writeComposerJSON(dir, raw) - assert.NoError(t, err) - - content, err := os.ReadFile(filepath.Join(dir, "composer.json")) - assert.NoError(t, err) - // Should be indented - assert.Contains(t, string(content), " ") - }) -} - -func TestWriteComposerJSON_Bad(t *testing.T) { - t.Run("fails for non-existent directory", func(t *testing.T) { - raw := make(map[string]json.RawMessage) - raw["name"] = json.RawMessage(`"test/project"`) - - err := writeComposerJSON("/non/existent/path", raw) - assert.Error(t, err) - assert.Contains(t, err.Error(), "Failed to write 
composer.json") - }) -} -func TestGetRepositories_Good(t *testing.T) { - t.Run("returns empty slice when no repositories", func(t *testing.T) { - raw := make(map[string]json.RawMessage) - raw["name"] = json.RawMessage(`"test/project"`) - - repos, err := getRepositories(raw) - assert.NoError(t, err) - assert.Empty(t, repos) - }) - - t.Run("parses existing repositories", func(t *testing.T) { - raw := make(map[string]json.RawMessage) - raw["name"] = json.RawMessage(`"test/project"`) - raw["repositories"] = json.RawMessage(`[{"type":"path","url":"/path/to/package"}]`) - - repos, err := getRepositories(raw) - assert.NoError(t, err) - assert.Len(t, repos, 1) - assert.Equal(t, "path", repos[0].Type) - assert.Equal(t, "/path/to/package", repos[0].URL) - }) - - t.Run("parses repositories with options", func(t *testing.T) { - raw := make(map[string]json.RawMessage) - raw["repositories"] = json.RawMessage(`[{"type":"path","url":"/path","options":{"symlink":true}}]`) - - repos, err := getRepositories(raw) - assert.NoError(t, err) - assert.Len(t, repos, 1) - assert.NotNil(t, repos[0].Options) - assert.Equal(t, true, repos[0].Options["symlink"]) - }) -} - -func TestGetRepositories_Bad(t *testing.T) { - t.Run("fails for invalid repositories JSON", func(t *testing.T) { - raw := make(map[string]json.RawMessage) - raw["repositories"] = json.RawMessage(`not valid json`) - - _, err := getRepositories(raw) - assert.Error(t, err) - assert.Contains(t, err.Error(), "Failed to parse repositories") - }) -} - -func TestSetRepositories_Good(t *testing.T) { - t.Run("sets repositories", func(t *testing.T) { - raw := make(map[string]json.RawMessage) - repos := []composerRepository{ - {Type: "path", URL: "/path/to/package"}, - } - - err := setRepositories(raw, repos) - assert.NoError(t, err) - assert.Contains(t, string(raw["repositories"]), "/path/to/package") - }) - - t.Run("removes repositories key when empty", func(t *testing.T) { - raw := make(map[string]json.RawMessage) - raw["repositories"] 
= json.RawMessage(`[{"type":"path"}]`) - - err := setRepositories(raw, []composerRepository{}) - assert.NoError(t, err) - _, exists := raw["repositories"] - assert.False(t, exists) - }) -} - -func TestGetPackageInfo_Good(t *testing.T) { - t.Run("extracts package name and version", func(t *testing.T) { - dir := t.TempDir() - composerJSON := `{ - "name": "vendor/package", - "version": "1.0.0" - }` - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - name, version, err := getPackageInfo(dir) - assert.NoError(t, err) - assert.Equal(t, "vendor/package", name) - assert.Equal(t, "1.0.0", version) - }) - - t.Run("works without version", func(t *testing.T) { - dir := t.TempDir() - composerJSON := `{ - "name": "vendor/package" - }` - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - name, version, err := getPackageInfo(dir) - assert.NoError(t, err) - assert.Equal(t, "vendor/package", name) - assert.Equal(t, "", version) - }) -} - -func TestGetPackageInfo_Bad(t *testing.T) { - t.Run("missing composer.json", func(t *testing.T) { - dir := t.TempDir() - _, _, err := getPackageInfo(dir) - assert.Error(t, err) - assert.Contains(t, err.Error(), "Failed to read package composer.json") - }) - - t.Run("invalid JSON", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte("not json{"), 0644) - require.NoError(t, err) - - _, _, err = getPackageInfo(dir) - assert.Error(t, err) - assert.Contains(t, err.Error(), "Failed to parse package composer.json") - }) - - t.Run("missing name", func(t *testing.T) { - dir := t.TempDir() - composerJSON := `{"version": "1.0.0"}` - err := os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - _, _, err = getPackageInfo(dir) - assert.Error(t, err) - assert.Contains(t, err.Error(), "package name not found") - }) -} - -func 
TestLinkPackages_Good(t *testing.T) { - t.Run("links a package", func(t *testing.T) { - // Create project directory - projectDir := t.TempDir() - err := os.WriteFile(filepath.Join(projectDir, "composer.json"), []byte(`{"name":"test/project"}`), 0644) - require.NoError(t, err) - - // Create package directory - packageDir := t.TempDir() - err = os.WriteFile(filepath.Join(packageDir, "composer.json"), []byte(`{"name":"vendor/package"}`), 0644) - require.NoError(t, err) - - err = LinkPackages(projectDir, []string{packageDir}) - assert.NoError(t, err) - - // Verify repository was added - raw, err := readComposerJSON(projectDir) - assert.NoError(t, err) - repos, err := getRepositories(raw) - assert.NoError(t, err) - assert.Len(t, repos, 1) - assert.Equal(t, "path", repos[0].Type) - }) - - t.Run("skips already linked package", func(t *testing.T) { - // Create project with existing repository - projectDir := t.TempDir() - packageDir := t.TempDir() - - err := os.WriteFile(filepath.Join(packageDir, "composer.json"), []byte(`{"name":"vendor/package"}`), 0644) - require.NoError(t, err) - - absPackagePath, _ := filepath.Abs(packageDir) - composerJSON := `{ - "name": "test/project", - "repositories": [{"type":"path","url":"` + absPackagePath + `"}] - }` - err = os.WriteFile(filepath.Join(projectDir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - // Link again - should not add duplicate - err = LinkPackages(projectDir, []string{packageDir}) - assert.NoError(t, err) - - raw, err := readComposerJSON(projectDir) - assert.NoError(t, err) - repos, err := getRepositories(raw) - assert.NoError(t, err) - assert.Len(t, repos, 1) // Still only one - }) - - t.Run("links multiple packages", func(t *testing.T) { - projectDir := t.TempDir() - err := os.WriteFile(filepath.Join(projectDir, "composer.json"), []byte(`{"name":"test/project"}`), 0644) - require.NoError(t, err) - - pkg1Dir := t.TempDir() - err = os.WriteFile(filepath.Join(pkg1Dir, "composer.json"), 
[]byte(`{"name":"vendor/pkg1"}`), 0644) - require.NoError(t, err) - - pkg2Dir := t.TempDir() - err = os.WriteFile(filepath.Join(pkg2Dir, "composer.json"), []byte(`{"name":"vendor/pkg2"}`), 0644) - require.NoError(t, err) - - err = LinkPackages(projectDir, []string{pkg1Dir, pkg2Dir}) - assert.NoError(t, err) - - raw, err := readComposerJSON(projectDir) - assert.NoError(t, err) - repos, err := getRepositories(raw) - assert.NoError(t, err) - assert.Len(t, repos, 2) - }) -} - -func TestLinkPackages_Bad(t *testing.T) { - t.Run("fails for non-PHP project", func(t *testing.T) { - dir := t.TempDir() - err := LinkPackages(dir, []string{"/path/to/package"}) - assert.Error(t, err) - assert.Contains(t, err.Error(), "not a PHP project") - }) - - t.Run("fails for non-PHP package", func(t *testing.T) { - projectDir := t.TempDir() - err := os.WriteFile(filepath.Join(projectDir, "composer.json"), []byte(`{"name":"test/project"}`), 0644) - require.NoError(t, err) - - packageDir := t.TempDir() - // No composer.json in package - - err = LinkPackages(projectDir, []string{packageDir}) - assert.Error(t, err) - assert.Contains(t, err.Error(), "not a PHP package") - }) -} - -func TestUnlinkPackages_Good(t *testing.T) { - t.Run("unlinks package by name", func(t *testing.T) { - projectDir := t.TempDir() - packageDir := t.TempDir() - - err := os.WriteFile(filepath.Join(packageDir, "composer.json"), []byte(`{"name":"vendor/package"}`), 0644) - require.NoError(t, err) - - absPackagePath, _ := filepath.Abs(packageDir) - composerJSON := `{ - "name": "test/project", - "repositories": [{"type":"path","url":"` + absPackagePath + `"}] - }` - err = os.WriteFile(filepath.Join(projectDir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - err = UnlinkPackages(projectDir, []string{"vendor/package"}) - assert.NoError(t, err) - - raw, err := readComposerJSON(projectDir) - assert.NoError(t, err) - repos, err := getRepositories(raw) - assert.NoError(t, err) - assert.Len(t, repos, 0) 
- }) - - t.Run("unlinks package by path", func(t *testing.T) { - projectDir := t.TempDir() - packageDir := t.TempDir() - - absPackagePath, _ := filepath.Abs(packageDir) - composerJSON := `{ - "name": "test/project", - "repositories": [{"type":"path","url":"` + absPackagePath + `"}] - }` - err := os.WriteFile(filepath.Join(projectDir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - err = UnlinkPackages(projectDir, []string{absPackagePath}) - assert.NoError(t, err) - - raw, err := readComposerJSON(projectDir) - assert.NoError(t, err) - repos, err := getRepositories(raw) - assert.NoError(t, err) - assert.Len(t, repos, 0) - }) - - t.Run("keeps non-path repositories", func(t *testing.T) { - projectDir := t.TempDir() - composerJSON := `{ - "name": "test/project", - "repositories": [ - {"type":"vcs","url":"https://github.com/vendor/package"}, - {"type":"path","url":"/local/path"} - ] - }` - err := os.WriteFile(filepath.Join(projectDir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - err = UnlinkPackages(projectDir, []string{"/local/path"}) - assert.NoError(t, err) - - raw, err := readComposerJSON(projectDir) - assert.NoError(t, err) - repos, err := getRepositories(raw) - assert.NoError(t, err) - assert.Len(t, repos, 1) - assert.Equal(t, "vcs", repos[0].Type) - }) -} - -func TestUnlinkPackages_Bad(t *testing.T) { - t.Run("fails for non-PHP project", func(t *testing.T) { - dir := t.TempDir() - err := UnlinkPackages(dir, []string{"vendor/package"}) - assert.Error(t, err) - assert.Contains(t, err.Error(), "not a PHP project") - }) -} - -func TestListLinkedPackages_Good(t *testing.T) { - t.Run("lists linked packages", func(t *testing.T) { - projectDir := t.TempDir() - packageDir := t.TempDir() - - err := os.WriteFile(filepath.Join(packageDir, "composer.json"), []byte(`{"name":"vendor/package","version":"1.0.0"}`), 0644) - require.NoError(t, err) - - absPackagePath, _ := filepath.Abs(packageDir) - composerJSON := `{ - 
"name": "test/project", - "repositories": [{"type":"path","url":"` + absPackagePath + `"}] - }` - err = os.WriteFile(filepath.Join(projectDir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - linked, err := ListLinkedPackages(projectDir) - assert.NoError(t, err) - assert.Len(t, linked, 1) - assert.Equal(t, "vendor/package", linked[0].Name) - assert.Equal(t, "1.0.0", linked[0].Version) - assert.Equal(t, absPackagePath, linked[0].Path) - }) - - t.Run("returns empty list when no linked packages", func(t *testing.T) { - projectDir := t.TempDir() - err := os.WriteFile(filepath.Join(projectDir, "composer.json"), []byte(`{"name":"test/project"}`), 0644) - require.NoError(t, err) - - linked, err := ListLinkedPackages(projectDir) - assert.NoError(t, err) - assert.Empty(t, linked) - }) - - t.Run("uses basename when package info unavailable", func(t *testing.T) { - projectDir := t.TempDir() - composerJSON := `{ - "name": "test/project", - "repositories": [{"type":"path","url":"/nonexistent/package-name"}] - }` - err := os.WriteFile(filepath.Join(projectDir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - linked, err := ListLinkedPackages(projectDir) - assert.NoError(t, err) - assert.Len(t, linked, 1) - assert.Equal(t, "package-name", linked[0].Name) - }) - - t.Run("ignores non-path repositories", func(t *testing.T) { - projectDir := t.TempDir() - composerJSON := `{ - "name": "test/project", - "repositories": [ - {"type":"vcs","url":"https://github.com/vendor/package"} - ] - }` - err := os.WriteFile(filepath.Join(projectDir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - linked, err := ListLinkedPackages(projectDir) - assert.NoError(t, err) - assert.Empty(t, linked) - }) -} - -func TestListLinkedPackages_Bad(t *testing.T) { - t.Run("fails for non-PHP project", func(t *testing.T) { - dir := t.TempDir() - _, err := ListLinkedPackages(dir) - assert.Error(t, err) - assert.Contains(t, err.Error(), 
"not a PHP project") - }) -} - -func TestUpdatePackages_Bad(t *testing.T) { - t.Run("fails for non-PHP project", func(t *testing.T) { - dir := t.TempDir() - err := UpdatePackages(dir, []string{"vendor/package"}) - assert.Error(t, err) - assert.Contains(t, err.Error(), "not a PHP project") - }) -} - -func TestUpdatePackages_Good(t *testing.T) { - t.Skip("requires Composer installed") - - t.Run("runs composer update", func(t *testing.T) { - projectDir := t.TempDir() - err := os.WriteFile(filepath.Join(projectDir, "composer.json"), []byte(`{"name":"test/project"}`), 0644) - require.NoError(t, err) - - err = UpdatePackages(projectDir, []string{"vendor/package"}) - // This will fail because composer update needs real dependencies - // but it validates the command runs - }) -} - -func TestLinkedPackage_Struct(t *testing.T) { - t.Run("all fields accessible", func(t *testing.T) { - pkg := LinkedPackage{ - Name: "vendor/package", - Path: "/path/to/package", - Version: "1.0.0", - } - - assert.Equal(t, "vendor/package", pkg.Name) - assert.Equal(t, "/path/to/package", pkg.Path) - assert.Equal(t, "1.0.0", pkg.Version) - }) -} - -func TestComposerRepository_Struct(t *testing.T) { - t.Run("all fields accessible", func(t *testing.T) { - repo := composerRepository{ - Type: "path", - URL: "/path/to/package", - Options: map[string]any{ - "symlink": true, - }, - } - - assert.Equal(t, "path", repo.Type) - assert.Equal(t, "/path/to/package", repo.URL) - assert.Equal(t, true, repo.Options["symlink"]) - }) -} diff --git a/pkg/php/php.go b/pkg/php/php.go deleted file mode 100644 index c00b92e..0000000 --- a/pkg/php/php.go +++ /dev/null @@ -1,397 +0,0 @@ -package php - -import ( - "context" - "io" - "os" - "sync" - "time" - - "github.com/host-uk/core/pkg/cli" -) - -// Options configures the development server. -type Options struct { - // Dir is the Laravel project directory. - Dir string - - // Services specifies which services to start. - // If empty, services are auto-detected. 
- Services []DetectedService - - // NoVite disables the Vite dev server. - NoVite bool - - // NoHorizon disables Laravel Horizon. - NoHorizon bool - - // NoReverb disables Laravel Reverb. - NoReverb bool - - // NoRedis disables the Redis server. - NoRedis bool - - // HTTPS enables HTTPS with mkcert certificates. - HTTPS bool - - // Domain is the domain for SSL certificates. - // Defaults to APP_URL from .env or "localhost". - Domain string - - // Ports for each service - FrankenPHPPort int - HTTPSPort int - VitePort int - ReverbPort int - RedisPort int -} - -// DevServer manages all development services. -type DevServer struct { - opts Options - services []Service - ctx context.Context - cancel context.CancelFunc - mu sync.RWMutex - running bool -} - -// NewDevServer creates a new development server manager. -func NewDevServer(opts Options) *DevServer { - return &DevServer{ - opts: opts, - services: make([]Service, 0), - } -} - -// Start starts all detected/configured services. -func (d *DevServer) Start(ctx context.Context, opts Options) error { - d.mu.Lock() - defer d.mu.Unlock() - - if d.running { - return cli.Err("dev server is already running") - } - - // Merge options - if opts.Dir != "" { - d.opts.Dir = opts.Dir - } - if d.opts.Dir == "" { - cwd, err := os.Getwd() - if err != nil { - return cli.WrapVerb(err, "get", "working directory") - } - d.opts.Dir = cwd - } - - // Verify this is a Laravel project - if !IsLaravelProject(d.opts.Dir) { - return cli.Err("not a Laravel project: %s", d.opts.Dir) - } - - // Create cancellable context - d.ctx, d.cancel = context.WithCancel(ctx) - - // Detect or use provided services - services := opts.Services - if len(services) == 0 { - services = DetectServices(d.opts.Dir) - } - - // Filter out disabled services - services = d.filterServices(services, opts) - - // Setup SSL if HTTPS is enabled - var certFile, keyFile string - if opts.HTTPS { - domain := opts.Domain - if domain == "" { - // Try to get domain from APP_URL - 
appURL := GetLaravelAppURL(d.opts.Dir) - if appURL != "" { - domain = ExtractDomainFromURL(appURL) - } - } - if domain == "" { - domain = "localhost" - } - - var err error - certFile, keyFile, err = SetupSSLIfNeeded(domain, SSLOptions{}) - if err != nil { - return cli.WrapVerb(err, "setup", "SSL") - } - } - - // Create services - d.services = make([]Service, 0) - - for _, svc := range services { - var service Service - - switch svc { - case ServiceFrankenPHP: - port := opts.FrankenPHPPort - if port == 0 { - port = 8000 - } - httpsPort := opts.HTTPSPort - if httpsPort == 0 { - httpsPort = 443 - } - service = NewFrankenPHPService(d.opts.Dir, FrankenPHPOptions{ - Port: port, - HTTPSPort: httpsPort, - HTTPS: opts.HTTPS, - CertFile: certFile, - KeyFile: keyFile, - }) - - case ServiceVite: - port := opts.VitePort - if port == 0 { - port = 5173 - } - service = NewViteService(d.opts.Dir, ViteOptions{ - Port: port, - }) - - case ServiceHorizon: - service = NewHorizonService(d.opts.Dir) - - case ServiceReverb: - port := opts.ReverbPort - if port == 0 { - port = 8080 - } - service = NewReverbService(d.opts.Dir, ReverbOptions{ - Port: port, - }) - - case ServiceRedis: - port := opts.RedisPort - if port == 0 { - port = 6379 - } - service = NewRedisService(d.opts.Dir, RedisOptions{ - Port: port, - }) - } - - if service != nil { - d.services = append(d.services, service) - } - } - - // Start all services - var startErrors []error - for _, svc := range d.services { - if err := svc.Start(d.ctx); err != nil { - startErrors = append(startErrors, cli.Err("%s: %v", svc.Name(), err)) - } - } - - if len(startErrors) > 0 { - // Stop any services that did start - for _, svc := range d.services { - svc.Stop() - } - return cli.Err("failed to start services: %v", startErrors) - } - - d.running = true - return nil -} - -// filterServices removes disabled services from the list. 
-func (d *DevServer) filterServices(services []DetectedService, opts Options) []DetectedService { - filtered := make([]DetectedService, 0) - - for _, svc := range services { - switch svc { - case ServiceVite: - if !opts.NoVite { - filtered = append(filtered, svc) - } - case ServiceHorizon: - if !opts.NoHorizon { - filtered = append(filtered, svc) - } - case ServiceReverb: - if !opts.NoReverb { - filtered = append(filtered, svc) - } - case ServiceRedis: - if !opts.NoRedis { - filtered = append(filtered, svc) - } - default: - filtered = append(filtered, svc) - } - } - - return filtered -} - -// Stop stops all services gracefully. -func (d *DevServer) Stop() error { - d.mu.Lock() - defer d.mu.Unlock() - - if !d.running { - return nil - } - - // Cancel context first - if d.cancel != nil { - d.cancel() - } - - // Stop all services in reverse order - var stopErrors []error - for i := len(d.services) - 1; i >= 0; i-- { - svc := d.services[i] - if err := svc.Stop(); err != nil { - stopErrors = append(stopErrors, cli.Err("%s: %v", svc.Name(), err)) - } - } - - d.running = false - - if len(stopErrors) > 0 { - return cli.Err("errors stopping services: %v", stopErrors) - } - - return nil -} - -// Logs returns a reader for the specified service's logs. -// If service is empty, returns unified logs from all services. -func (d *DevServer) Logs(service string, follow bool) (io.ReadCloser, error) { - d.mu.RLock() - defer d.mu.RUnlock() - - if service == "" { - // Return unified logs - return d.unifiedLogs(follow) - } - - // Find specific service - for _, svc := range d.services { - if svc.Name() == service { - return svc.Logs(follow) - } - } - - return nil, cli.Err("service not found: %s", service) -} - -// unifiedLogs creates a reader that combines logs from all services. 
-func (d *DevServer) unifiedLogs(follow bool) (io.ReadCloser, error) { - readers := make([]io.ReadCloser, 0) - - for _, svc := range d.services { - reader, err := svc.Logs(follow) - if err != nil { - // Close any readers we already opened - for _, r := range readers { - r.Close() - } - return nil, cli.Err("failed to get logs for %s: %v", svc.Name(), err) - } - readers = append(readers, reader) - } - - return newMultiServiceReader(d.services, readers, follow), nil -} - -// Status returns the status of all services. -func (d *DevServer) Status() []ServiceStatus { - d.mu.RLock() - defer d.mu.RUnlock() - - statuses := make([]ServiceStatus, 0, len(d.services)) - for _, svc := range d.services { - statuses = append(statuses, svc.Status()) - } - - return statuses -} - -// IsRunning returns true if the dev server is running. -func (d *DevServer) IsRunning() bool { - d.mu.RLock() - defer d.mu.RUnlock() - return d.running -} - -// Services returns the list of managed services. -func (d *DevServer) Services() []Service { - d.mu.RLock() - defer d.mu.RUnlock() - return d.services -} - -// multiServiceReader combines multiple service log readers. 
-type multiServiceReader struct { - services []Service - readers []io.ReadCloser - follow bool - closed bool - mu sync.RWMutex -} - -func newMultiServiceReader(services []Service, readers []io.ReadCloser, follow bool) *multiServiceReader { - return &multiServiceReader{ - services: services, - readers: readers, - follow: follow, - } -} - -func (m *multiServiceReader) Read(p []byte) (n int, err error) { - m.mu.RLock() - if m.closed { - m.mu.RUnlock() - return 0, io.EOF - } - m.mu.RUnlock() - - // Round-robin read from all readers - for i, reader := range m.readers { - buf := make([]byte, len(p)) - n, err := reader.Read(buf) - if n > 0 { - // Prefix with service name - prefix := cli.Sprintf("[%s] ", m.services[i].Name()) - copy(p, prefix) - copy(p[len(prefix):], buf[:n]) - return n + len(prefix), nil - } - if err != nil && err != io.EOF { - return 0, err - } - } - - if m.follow { - time.Sleep(100 * time.Millisecond) - return 0, nil - } - - return 0, io.EOF -} - -func (m *multiServiceReader) Close() error { - m.mu.Lock() - m.closed = true - m.mu.Unlock() - - var closeErr error - for _, reader := range m.readers { - if err := reader.Close(); err != nil && closeErr == nil { - closeErr = err - } - } - return closeErr -} diff --git a/pkg/php/php_test.go b/pkg/php/php_test.go deleted file mode 100644 index 7413a05..0000000 --- a/pkg/php/php_test.go +++ /dev/null @@ -1,644 +0,0 @@ -package php - -import ( - "context" - "io" - "os" - "path/filepath" - "strings" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestNewDevServer_Good(t *testing.T) { - t.Run("creates dev server with default options", func(t *testing.T) { - opts := Options{} - server := NewDevServer(opts) - - assert.NotNil(t, server) - assert.Empty(t, server.services) - assert.False(t, server.running) - }) - - t.Run("creates dev server with custom options", func(t *testing.T) { - opts := Options{ - Dir: "/tmp/test", - NoVite: true, - NoHorizon: true, 
- FrankenPHPPort: 9000, - } - server := NewDevServer(opts) - - assert.NotNil(t, server) - assert.Equal(t, "/tmp/test", server.opts.Dir) - assert.True(t, server.opts.NoVite) - }) -} - -func TestDevServer_IsRunning_Good(t *testing.T) { - t.Run("returns false when not running", func(t *testing.T) { - server := NewDevServer(Options{}) - assert.False(t, server.IsRunning()) - }) -} - -func TestDevServer_Status_Good(t *testing.T) { - t.Run("returns empty status when no services", func(t *testing.T) { - server := NewDevServer(Options{}) - statuses := server.Status() - assert.Empty(t, statuses) - }) -} - -func TestDevServer_Services_Good(t *testing.T) { - t.Run("returns empty services list initially", func(t *testing.T) { - server := NewDevServer(Options{}) - services := server.Services() - assert.Empty(t, services) - }) -} - -func TestDevServer_Stop_Good(t *testing.T) { - t.Run("returns nil when not running", func(t *testing.T) { - server := NewDevServer(Options{}) - err := server.Stop() - assert.NoError(t, err) - }) -} - -func TestDevServer_Start_Bad(t *testing.T) { - t.Run("fails when already running", func(t *testing.T) { - server := NewDevServer(Options{}) - server.running = true - - err := server.Start(context.Background(), Options{}) - assert.Error(t, err) - assert.Contains(t, err.Error(), "already running") - }) - - t.Run("fails for non-Laravel project", func(t *testing.T) { - dir := t.TempDir() - server := NewDevServer(Options{Dir: dir}) - - err := server.Start(context.Background(), Options{Dir: dir}) - assert.Error(t, err) - assert.Contains(t, err.Error(), "not a Laravel project") - }) -} - -func TestDevServer_Logs_Bad(t *testing.T) { - t.Run("fails for non-existent service", func(t *testing.T) { - server := NewDevServer(Options{}) - - _, err := server.Logs("nonexistent", false) - assert.Error(t, err) - assert.Contains(t, err.Error(), "service not found") - }) -} - -func TestDevServer_filterServices_Good(t *testing.T) { - tests := []struct { - name string - 
services []DetectedService - opts Options - expected []DetectedService - }{ - { - name: "no filtering with default options", - services: []DetectedService{ServiceFrankenPHP, ServiceVite, ServiceHorizon}, - opts: Options{}, - expected: []DetectedService{ServiceFrankenPHP, ServiceVite, ServiceHorizon}, - }, - { - name: "filters Vite when NoVite is true", - services: []DetectedService{ServiceFrankenPHP, ServiceVite, ServiceHorizon}, - opts: Options{NoVite: true}, - expected: []DetectedService{ServiceFrankenPHP, ServiceHorizon}, - }, - { - name: "filters Horizon when NoHorizon is true", - services: []DetectedService{ServiceFrankenPHP, ServiceVite, ServiceHorizon}, - opts: Options{NoHorizon: true}, - expected: []DetectedService{ServiceFrankenPHP, ServiceVite}, - }, - { - name: "filters Reverb when NoReverb is true", - services: []DetectedService{ServiceFrankenPHP, ServiceReverb}, - opts: Options{NoReverb: true}, - expected: []DetectedService{ServiceFrankenPHP}, - }, - { - name: "filters Redis when NoRedis is true", - services: []DetectedService{ServiceFrankenPHP, ServiceRedis}, - opts: Options{NoRedis: true}, - expected: []DetectedService{ServiceFrankenPHP}, - }, - { - name: "filters multiple services", - services: []DetectedService{ServiceFrankenPHP, ServiceVite, ServiceHorizon, ServiceReverb, ServiceRedis}, - opts: Options{NoVite: true, NoHorizon: true, NoReverb: true, NoRedis: true}, - expected: []DetectedService{ServiceFrankenPHP}, - }, - { - name: "keeps unknown services", - services: []DetectedService{ServiceFrankenPHP}, - opts: Options{NoVite: true}, - expected: []DetectedService{ServiceFrankenPHP}, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - server := NewDevServer(Options{}) - result := server.filterServices(tt.services, tt.opts) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestMultiServiceReader_Good(t *testing.T) { - t.Run("closes all readers on Close", func(t *testing.T) { - // Create mock readers using files 
- dir := t.TempDir() - file1, err := os.CreateTemp(dir, "log1-*.log") - require.NoError(t, err) - file1.WriteString("test1") - file1.Seek(0, 0) - - file2, err := os.CreateTemp(dir, "log2-*.log") - require.NoError(t, err) - file2.WriteString("test2") - file2.Seek(0, 0) - - // Create mock services - services := []Service{ - &FrankenPHPService{baseService: baseService{name: "svc1"}}, - &ViteService{baseService: baseService{name: "svc2"}}, - } - readers := []io.ReadCloser{file1, file2} - - reader := newMultiServiceReader(services, readers, false) - assert.NotNil(t, reader) - - err = reader.Close() - assert.NoError(t, err) - assert.True(t, reader.closed) - }) - - t.Run("returns EOF when closed", func(t *testing.T) { - reader := &multiServiceReader{closed: true} - buf := make([]byte, 10) - n, err := reader.Read(buf) - assert.Equal(t, 0, n) - assert.Equal(t, io.EOF, err) - }) -} - -func TestMultiServiceReader_Read_Good(t *testing.T) { - t.Run("reads from readers with service prefix", func(t *testing.T) { - dir := t.TempDir() - file1, err := os.CreateTemp(dir, "log-*.log") - require.NoError(t, err) - file1.WriteString("log content") - file1.Seek(0, 0) - - services := []Service{ - &FrankenPHPService{baseService: baseService{name: "TestService"}}, - } - readers := []io.ReadCloser{file1} - - reader := newMultiServiceReader(services, readers, false) - buf := make([]byte, 100) - n, err := reader.Read(buf) - - assert.NoError(t, err) - assert.Greater(t, n, 0) - result := string(buf[:n]) - assert.Contains(t, result, "[TestService]") - }) - - t.Run("returns EOF when all readers are exhausted in non-follow mode", func(t *testing.T) { - dir := t.TempDir() - file1, err := os.CreateTemp(dir, "log-*.log") - require.NoError(t, err) - file1.Close() // Empty file - - file1, err = os.Open(file1.Name()) - require.NoError(t, err) - - services := []Service{ - &FrankenPHPService{baseService: baseService{name: "TestService"}}, - } - readers := []io.ReadCloser{file1} - - reader := 
newMultiServiceReader(services, readers, false) - buf := make([]byte, 100) - n, err := reader.Read(buf) - - assert.Equal(t, 0, n) - assert.Equal(t, io.EOF, err) - }) -} - -func TestOptions_Good(t *testing.T) { - t.Run("all fields are accessible", func(t *testing.T) { - opts := Options{ - Dir: "/test", - Services: []DetectedService{ServiceFrankenPHP}, - NoVite: true, - NoHorizon: true, - NoReverb: true, - NoRedis: true, - HTTPS: true, - Domain: "test.local", - FrankenPHPPort: 8000, - HTTPSPort: 443, - VitePort: 5173, - ReverbPort: 8080, - RedisPort: 6379, - } - - assert.Equal(t, "/test", opts.Dir) - assert.Equal(t, []DetectedService{ServiceFrankenPHP}, opts.Services) - assert.True(t, opts.NoVite) - assert.True(t, opts.NoHorizon) - assert.True(t, opts.NoReverb) - assert.True(t, opts.NoRedis) - assert.True(t, opts.HTTPS) - assert.Equal(t, "test.local", opts.Domain) - assert.Equal(t, 8000, opts.FrankenPHPPort) - assert.Equal(t, 443, opts.HTTPSPort) - assert.Equal(t, 5173, opts.VitePort) - assert.Equal(t, 8080, opts.ReverbPort) - assert.Equal(t, 6379, opts.RedisPort) - }) -} - -func TestDevServer_StartStop_Integration(t *testing.T) { - t.Skip("requires PHP/FrankenPHP installed") - - dir := t.TempDir() - setupLaravelProject(t, dir) - - server := NewDevServer(Options{Dir: dir}) - ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) - defer cancel() - - err := server.Start(ctx, Options{Dir: dir}) - require.NoError(t, err) - assert.True(t, server.IsRunning()) - - err = server.Stop() - require.NoError(t, err) - assert.False(t, server.IsRunning()) -} - -// setupLaravelProject creates a minimal Laravel project structure for testing. 
-func setupLaravelProject(t *testing.T, dir string) { - t.Helper() - - // Create artisan file - err := os.WriteFile(filepath.Join(dir, "artisan"), []byte("#!/usr/bin/env php\n"), 0755) - require.NoError(t, err) - - // Create composer.json with Laravel - composerJSON := `{ - "name": "test/laravel-project", - "require": { - "php": "^8.2", - "laravel/framework": "^11.0", - "laravel/octane": "^2.0" - } - }` - err = os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) -} - -func TestDevServer_UnifiedLogs_Bad(t *testing.T) { - t.Run("returns error when service logs fail", func(t *testing.T) { - server := NewDevServer(Options{}) - - // Create a mock service that will fail to provide logs - mockService := &FrankenPHPService{ - baseService: baseService{ - name: "FailingService", - logPath: "", // No log path set will cause error - }, - } - server.services = []Service{mockService} - - _, err := server.Logs("", false) - assert.Error(t, err) - assert.Contains(t, err.Error(), "failed to get logs") - }) -} - -func TestDevServer_Logs_Good(t *testing.T) { - t.Run("finds specific service logs", func(t *testing.T) { - dir := t.TempDir() - logFile := filepath.Join(dir, "test.log") - err := os.WriteFile(logFile, []byte("test log content"), 0644) - require.NoError(t, err) - - server := NewDevServer(Options{}) - mockService := &FrankenPHPService{ - baseService: baseService{ - name: "TestService", - logPath: logFile, - }, - } - server.services = []Service{mockService} - - reader, err := server.Logs("TestService", false) - assert.NoError(t, err) - assert.NotNil(t, reader) - reader.Close() - }) -} - -func TestDevServer_MergeOptions_Good(t *testing.T) { - t.Run("start merges options correctly", func(t *testing.T) { - dir := t.TempDir() - server := NewDevServer(Options{Dir: "/original"}) - - // Setup a minimal non-Laravel project to trigger an error - // but still test the options merge happens first - err := 
server.Start(context.Background(), Options{Dir: dir}) - assert.Error(t, err) // Will fail because not Laravel project - // But the directory should have been merged - assert.Equal(t, dir, server.opts.Dir) - }) -} - -func TestDetectedService_Constants(t *testing.T) { - t.Run("all service constants are defined", func(t *testing.T) { - assert.Equal(t, DetectedService("frankenphp"), ServiceFrankenPHP) - assert.Equal(t, DetectedService("vite"), ServiceVite) - assert.Equal(t, DetectedService("horizon"), ServiceHorizon) - assert.Equal(t, DetectedService("reverb"), ServiceReverb) - assert.Equal(t, DetectedService("redis"), ServiceRedis) - }) -} - -func TestDevServer_HTTPSSetup(t *testing.T) { - t.Run("extracts domain from APP_URL when HTTPS enabled", func(t *testing.T) { - dir := t.TempDir() - - // Create Laravel project - err := os.WriteFile(filepath.Join(dir, "artisan"), []byte("#!/usr/bin/env php\n"), 0755) - require.NoError(t, err) - - composerJSON := `{ - "require": { - "laravel/framework": "^11.0", - "laravel/octane": "^2.0" - } - }` - err = os.WriteFile(filepath.Join(dir, "composer.json"), []byte(composerJSON), 0644) - require.NoError(t, err) - - // Create .env with APP_URL - envContent := "APP_URL=https://myapp.test" - err = os.WriteFile(filepath.Join(dir, ".env"), []byte(envContent), 0644) - require.NoError(t, err) - - // Verify we can extract the domain - url := GetLaravelAppURL(dir) - domain := ExtractDomainFromURL(url) - assert.Equal(t, "myapp.test", domain) - }) -} - -func TestDevServer_PortDefaults(t *testing.T) { - t.Run("uses default ports when not specified", func(t *testing.T) { - // This tests the logic in Start() for default port assignment - // We verify the constants/defaults by checking what would be created - - // FrankenPHP default port is 8000 - svc := NewFrankenPHPService("/tmp", FrankenPHPOptions{}) - assert.Equal(t, 8000, svc.port) - - // Vite default port is 5173 - vite := NewViteService("/tmp", ViteOptions{}) - assert.Equal(t, 5173, 
vite.port) - - // Reverb default port is 8080 - reverb := NewReverbService("/tmp", ReverbOptions{}) - assert.Equal(t, 8080, reverb.port) - - // Redis default port is 6379 - redis := NewRedisService("/tmp", RedisOptions{}) - assert.Equal(t, 6379, redis.port) - }) -} - -func TestDevServer_ServiceCreation(t *testing.T) { - t.Run("creates correct services based on detected services", func(t *testing.T) { - // Test that the switch statement in Start() creates the right service types - services := []DetectedService{ - ServiceFrankenPHP, - ServiceVite, - ServiceHorizon, - ServiceReverb, - ServiceRedis, - } - - // Verify each service type string - expected := []string{"frankenphp", "vite", "horizon", "reverb", "redis"} - for i, svc := range services { - assert.Equal(t, expected[i], string(svc)) - } - }) -} - -func TestMultiServiceReader_CloseError(t *testing.T) { - t.Run("returns first close error", func(t *testing.T) { - dir := t.TempDir() - - // Create a real file that we can close - file1, err := os.CreateTemp(dir, "log-*.log") - require.NoError(t, err) - file1Name := file1.Name() - file1.Close() - - // Reopen for reading - file1, err = os.Open(file1Name) - require.NoError(t, err) - - services := []Service{ - &FrankenPHPService{baseService: baseService{name: "svc1"}}, - } - readers := []io.ReadCloser{file1} - - reader := newMultiServiceReader(services, readers, false) - err = reader.Close() - assert.NoError(t, err) - - // Second close should still work (files already closed) - // The closed flag prevents double-processing - assert.True(t, reader.closed) - }) -} - -func TestMultiServiceReader_FollowMode(t *testing.T) { - t.Run("returns 0 bytes without error in follow mode when no data", func(t *testing.T) { - dir := t.TempDir() - file1, err := os.CreateTemp(dir, "log-*.log") - require.NoError(t, err) - file1Name := file1.Name() - file1.Close() - - // Reopen for reading (empty file) - file1, err = os.Open(file1Name) - require.NoError(t, err) - - services := []Service{ - 
&FrankenPHPService{baseService: baseService{name: "svc1"}}, - } - readers := []io.ReadCloser{file1} - - reader := newMultiServiceReader(services, readers, true) // follow=true - - // Use a channel to timeout the read since follow mode waits - done := make(chan bool) - go func() { - buf := make([]byte, 100) - n, err := reader.Read(buf) - // In follow mode, should return 0 bytes and nil error (waiting for more data) - assert.Equal(t, 0, n) - assert.NoError(t, err) - done <- true - }() - - select { - case <-done: - // Good, read completed - case <-time.After(500 * time.Millisecond): - // Also acceptable - follow mode is waiting - } - - reader.Close() - }) -} - -func TestGetLaravelAppURL_Bad(t *testing.T) { - t.Run("no .env file", func(t *testing.T) { - dir := t.TempDir() - assert.Equal(t, "", GetLaravelAppURL(dir)) - }) - - t.Run("no APP_URL in .env", func(t *testing.T) { - dir := t.TempDir() - envContent := "APP_NAME=Test\nAPP_ENV=local" - err := os.WriteFile(filepath.Join(dir, ".env"), []byte(envContent), 0644) - require.NoError(t, err) - - assert.Equal(t, "", GetLaravelAppURL(dir)) - }) -} - -func TestExtractDomainFromURL_Edge(t *testing.T) { - tests := []struct { - name string - url string - expected string - }{ - {"empty string", "", ""}, - {"just domain", "example.com", "example.com"}, - {"http only", "http://", ""}, - {"https only", "https://", ""}, - {"domain with trailing slash", "https://example.com/", "example.com"}, - {"complex path", "https://example.com:8080/path/to/page?query=1", "example.com"}, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // Strip protocol - result := ExtractDomainFromURL(tt.url) - if tt.url != "" && !strings.HasPrefix(tt.url, "http://") && !strings.HasPrefix(tt.url, "https://") && !strings.Contains(tt.url, ":") && !strings.Contains(tt.url, "/") { - assert.Equal(t, tt.expected, result) - } - }) - } -} - -func TestDevServer_StatusWithServices(t *testing.T) { - t.Run("returns statuses for all services", 
func(t *testing.T) { - server := NewDevServer(Options{}) - - // Add mock services - server.services = []Service{ - &FrankenPHPService{baseService: baseService{name: "svc1", running: true, port: 8000}}, - &ViteService{baseService: baseService{name: "svc2", running: false, port: 5173}}, - } - - statuses := server.Status() - assert.Len(t, statuses, 2) - assert.Equal(t, "svc1", statuses[0].Name) - assert.True(t, statuses[0].Running) - assert.Equal(t, "svc2", statuses[1].Name) - assert.False(t, statuses[1].Running) - }) -} - -func TestDevServer_ServicesReturnsAll(t *testing.T) { - t.Run("returns all services", func(t *testing.T) { - server := NewDevServer(Options{}) - - // Add mock services - server.services = []Service{ - &FrankenPHPService{baseService: baseService{name: "svc1"}}, - &ViteService{baseService: baseService{name: "svc2"}}, - &HorizonService{baseService: baseService{name: "svc3"}}, - } - - services := server.Services() - assert.Len(t, services, 3) - }) -} - -func TestDevServer_StopWithCancel(t *testing.T) { - t.Run("calls cancel when running", func(t *testing.T) { - ctx, cancel := context.WithCancel(context.Background()) - server := NewDevServer(Options{}) - server.running = true - server.cancel = cancel - server.ctx = ctx - - // Add a mock service that won't error - server.services = []Service{ - &FrankenPHPService{baseService: baseService{name: "svc1", running: false}}, - } - - err := server.Stop() - assert.NoError(t, err) - assert.False(t, server.running) - }) -} - -func TestMultiServiceReader_CloseWithErrors(t *testing.T) { - t.Run("handles multiple close errors", func(t *testing.T) { - dir := t.TempDir() - - // Create files - file1, err := os.CreateTemp(dir, "log1-*.log") - require.NoError(t, err) - file2, err := os.CreateTemp(dir, "log2-*.log") - require.NoError(t, err) - - services := []Service{ - &FrankenPHPService{baseService: baseService{name: "svc1"}}, - &ViteService{baseService: baseService{name: "svc2"}}, - } - readers := []io.ReadCloser{file1, 
file2} - - reader := newMultiServiceReader(services, readers, false) - - // Close successfully - err = reader.Close() - assert.NoError(t, err) - }) -} diff --git a/pkg/php/quality.go b/pkg/php/quality.go deleted file mode 100644 index 31c71cd..0000000 --- a/pkg/php/quality.go +++ /dev/null @@ -1,946 +0,0 @@ -package php - -import ( - "context" - "encoding/json" - "io" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/cli" -) - -// FormatOptions configures PHP code formatting. -type FormatOptions struct { - // Dir is the project directory (defaults to current working directory). - Dir string - - // Fix automatically fixes formatting issues. - Fix bool - - // Diff shows a diff of changes instead of modifying files. - Diff bool - - // Paths limits formatting to specific paths. - Paths []string - - // Output is the writer for output (defaults to os.Stdout). - Output io.Writer -} - -// AnalyseOptions configures PHP static analysis. -type AnalyseOptions struct { - // Dir is the project directory (defaults to current working directory). - Dir string - - // Level is the PHPStan analysis level (0-9). - Level int - - // Paths limits analysis to specific paths. - Paths []string - - // Memory is the memory limit for analysis (e.g., "2G"). - Memory string - - // Output is the writer for output (defaults to os.Stdout). - Output io.Writer -} - -// FormatterType represents the detected formatter. -type FormatterType string - -const ( - FormatterPint FormatterType = "pint" -) - -// AnalyserType represents the detected static analyser. -type AnalyserType string - -const ( - AnalyserPHPStan AnalyserType = "phpstan" - AnalyserLarastan AnalyserType = "larastan" -) - -// DetectFormatter detects which formatter is available in the project. 
-func DetectFormatter(dir string) (FormatterType, bool) { - // Check for Pint config - pintConfig := filepath.Join(dir, "pint.json") - if _, err := os.Stat(pintConfig); err == nil { - return FormatterPint, true - } - - // Check for vendor binary - pintBin := filepath.Join(dir, "vendor", "bin", "pint") - if _, err := os.Stat(pintBin); err == nil { - return FormatterPint, true - } - - return "", false -} - -// DetectAnalyser detects which static analyser is available in the project. -func DetectAnalyser(dir string) (AnalyserType, bool) { - // Check for PHPStan config - phpstanConfig := filepath.Join(dir, "phpstan.neon") - phpstanDistConfig := filepath.Join(dir, "phpstan.neon.dist") - - hasConfig := false - if _, err := os.Stat(phpstanConfig); err == nil { - hasConfig = true - } - if _, err := os.Stat(phpstanDistConfig); err == nil { - hasConfig = true - } - - // Check for vendor binary - phpstanBin := filepath.Join(dir, "vendor", "bin", "phpstan") - hasBin := false - if _, err := os.Stat(phpstanBin); err == nil { - hasBin = true - } - - if hasConfig || hasBin { - // Check if it's Larastan (Laravel-specific PHPStan) - larastanPath := filepath.Join(dir, "vendor", "larastan", "larastan") - if _, err := os.Stat(larastanPath); err == nil { - return AnalyserLarastan, true - } - // Also check nunomaduro/larastan - larastanPath2 := filepath.Join(dir, "vendor", "nunomaduro", "larastan") - if _, err := os.Stat(larastanPath2); err == nil { - return AnalyserLarastan, true - } - return AnalyserPHPStan, true - } - - return "", false -} - -// Format runs Laravel Pint to format PHP code. 
-func Format(ctx context.Context, opts FormatOptions) error { - if opts.Dir == "" { - cwd, err := os.Getwd() - if err != nil { - return cli.WrapVerb(err, "get", "working directory") - } - opts.Dir = cwd - } - - if opts.Output == nil { - opts.Output = os.Stdout - } - - // Check if formatter is available - formatter, found := DetectFormatter(opts.Dir) - if !found { - return cli.Err("no formatter found (install Laravel Pint: composer require laravel/pint --dev)") - } - - var cmdName string - var args []string - - switch formatter { - case FormatterPint: - cmdName, args = buildPintCommand(opts) - } - - cmd := exec.CommandContext(ctx, cmdName, args...) - cmd.Dir = opts.Dir - cmd.Stdout = opts.Output - cmd.Stderr = opts.Output - - return cmd.Run() -} - -// Analyse runs PHPStan or Larastan for static analysis. -func Analyse(ctx context.Context, opts AnalyseOptions) error { - if opts.Dir == "" { - cwd, err := os.Getwd() - if err != nil { - return cli.WrapVerb(err, "get", "working directory") - } - opts.Dir = cwd - } - - if opts.Output == nil { - opts.Output = os.Stdout - } - - // Check if analyser is available - analyser, found := DetectAnalyser(opts.Dir) - if !found { - return cli.Err("no static analyser found (install PHPStan: composer require phpstan/phpstan --dev)") - } - - var cmdName string - var args []string - - switch analyser { - case AnalyserPHPStan, AnalyserLarastan: - cmdName, args = buildPHPStanCommand(opts) - } - - cmd := exec.CommandContext(ctx, cmdName, args...) - cmd.Dir = opts.Dir - cmd.Stdout = opts.Output - cmd.Stderr = opts.Output - - return cmd.Run() -} - -// buildPintCommand builds the command for running Laravel Pint. 
-func buildPintCommand(opts FormatOptions) (string, []string) { - // Check for vendor binary first - vendorBin := filepath.Join(opts.Dir, "vendor", "bin", "pint") - cmdName := "pint" - if _, err := os.Stat(vendorBin); err == nil { - cmdName = vendorBin - } - - var args []string - - if !opts.Fix { - args = append(args, "--test") - } - - if opts.Diff { - args = append(args, "--diff") - } - - // Add specific paths if provided - args = append(args, opts.Paths...) - - return cmdName, args -} - -// buildPHPStanCommand builds the command for running PHPStan. -func buildPHPStanCommand(opts AnalyseOptions) (string, []string) { - // Check for vendor binary first - vendorBin := filepath.Join(opts.Dir, "vendor", "bin", "phpstan") - cmdName := "phpstan" - if _, err := os.Stat(vendorBin); err == nil { - cmdName = vendorBin - } - - args := []string{"analyse"} - - if opts.Level > 0 { - args = append(args, "--level", cli.Sprintf("%d", opts.Level)) - } - - if opts.Memory != "" { - args = append(args, "--memory-limit", opts.Memory) - } - - // Add specific paths if provided - args = append(args, opts.Paths...) - - return cmdName, args -} - -// ============================================================================= -// Psalm Static Analysis -// ============================================================================= - -// PsalmOptions configures Psalm static analysis. -type PsalmOptions struct { - Dir string - Level int // Error level (1=strictest, 8=most lenient) - Fix bool // Auto-fix issues where possible - Baseline bool // Generate/update baseline file - ShowInfo bool // Show info-level issues - Output io.Writer -} - -// PsalmType represents the detected Psalm configuration. -type PsalmType string - -const ( - PsalmStandard PsalmType = "psalm" -) - -// DetectPsalm checks if Psalm is available in the project. 
-func DetectPsalm(dir string) (PsalmType, bool) { - // Check for psalm.xml config - psalmConfig := filepath.Join(dir, "psalm.xml") - psalmDistConfig := filepath.Join(dir, "psalm.xml.dist") - - hasConfig := false - if _, err := os.Stat(psalmConfig); err == nil { - hasConfig = true - } - if _, err := os.Stat(psalmDistConfig); err == nil { - hasConfig = true - } - - // Check for vendor binary - psalmBin := filepath.Join(dir, "vendor", "bin", "psalm") - if _, err := os.Stat(psalmBin); err == nil { - return PsalmStandard, true - } - - if hasConfig { - return PsalmStandard, true - } - - return "", false -} - -// RunPsalm runs Psalm static analysis. -func RunPsalm(ctx context.Context, opts PsalmOptions) error { - if opts.Dir == "" { - cwd, err := os.Getwd() - if err != nil { - return cli.WrapVerb(err, "get", "working directory") - } - opts.Dir = cwd - } - - if opts.Output == nil { - opts.Output = os.Stdout - } - - // Build command - vendorBin := filepath.Join(opts.Dir, "vendor", "bin", "psalm") - cmdName := "psalm" - if _, err := os.Stat(vendorBin); err == nil { - cmdName = vendorBin - } - - args := []string{"--no-progress"} - - if opts.Level > 0 && opts.Level <= 8 { - args = append(args, cli.Sprintf("--error-level=%d", opts.Level)) - } - - if opts.Fix { - args = append(args, "--alter", "--issues=all") - } - - if opts.Baseline { - args = append(args, "--set-baseline=psalm-baseline.xml") - } - - if opts.ShowInfo { - args = append(args, "--show-info=true") - } - - cmd := exec.CommandContext(ctx, cmdName, args...) - cmd.Dir = opts.Dir - cmd.Stdout = opts.Output - cmd.Stderr = opts.Output - - return cmd.Run() -} - -// ============================================================================= -// Security Audit -// ============================================================================= - -// AuditOptions configures dependency security auditing. 
-type AuditOptions struct { - Dir string - JSON bool // Output in JSON format - Fix bool // Auto-fix vulnerabilities (npm only) - Output io.Writer -} - -// AuditResult holds the results of a security audit. -type AuditResult struct { - Tool string - Vulnerabilities int - Advisories []AuditAdvisory - Error error -} - -// AuditAdvisory represents a single security advisory. -type AuditAdvisory struct { - Package string - Severity string - Title string - URL string - Identifiers []string -} - -// RunAudit runs security audits on dependencies. -func RunAudit(ctx context.Context, opts AuditOptions) ([]AuditResult, error) { - if opts.Dir == "" { - cwd, err := os.Getwd() - if err != nil { - return nil, cli.WrapVerb(err, "get", "working directory") - } - opts.Dir = cwd - } - - if opts.Output == nil { - opts.Output = os.Stdout - } - - var results []AuditResult - - // Run composer audit - composerResult := runComposerAudit(ctx, opts) - results = append(results, composerResult) - - // Run npm audit if package.json exists - if _, err := os.Stat(filepath.Join(opts.Dir, "package.json")); err == nil { - npmResult := runNpmAudit(ctx, opts) - results = append(results, npmResult) - } - - return results, nil -} - -func runComposerAudit(ctx context.Context, opts AuditOptions) AuditResult { - result := AuditResult{Tool: "composer"} - - args := []string{"audit", "--format=json"} - - cmd := exec.CommandContext(ctx, "composer", args...) - cmd.Dir = opts.Dir - - output, err := cmd.Output() - if err != nil { - // composer audit returns non-zero if vulnerabilities found - if exitErr, ok := err.(*exec.ExitError); ok { - output = append(output, exitErr.Stderr...) 
- } - } - - // Parse JSON output - var auditData struct { - Advisories map[string][]struct { - Title string `json:"title"` - Link string `json:"link"` - CVE string `json:"cve"` - AffectedRanges string `json:"affectedVersions"` - } `json:"advisories"` - } - - if jsonErr := json.Unmarshal(output, &auditData); jsonErr == nil { - for pkg, advisories := range auditData.Advisories { - for _, adv := range advisories { - result.Advisories = append(result.Advisories, AuditAdvisory{ - Package: pkg, - Title: adv.Title, - URL: adv.Link, - Identifiers: []string{adv.CVE}, - }) - } - } - result.Vulnerabilities = len(result.Advisories) - } else if err != nil { - result.Error = err - } - - return result -} - -func runNpmAudit(ctx context.Context, opts AuditOptions) AuditResult { - result := AuditResult{Tool: "npm"} - - args := []string{"audit", "--json"} - if opts.Fix { - args = []string{"audit", "fix"} - } - - cmd := exec.CommandContext(ctx, "npm", args...) - cmd.Dir = opts.Dir - - output, err := cmd.Output() - if err != nil { - if exitErr, ok := err.(*exec.ExitError); ok { - output = append(output, exitErr.Stderr...) 
- } - } - - if !opts.Fix { - // Parse JSON output - var auditData struct { - Metadata struct { - Vulnerabilities struct { - Total int `json:"total"` - } `json:"vulnerabilities"` - } `json:"metadata"` - Vulnerabilities map[string]struct { - Severity string `json:"severity"` - Via []any `json:"via"` - } `json:"vulnerabilities"` - } - - if jsonErr := json.Unmarshal(output, &auditData); jsonErr == nil { - result.Vulnerabilities = auditData.Metadata.Vulnerabilities.Total - for pkg, vuln := range auditData.Vulnerabilities { - result.Advisories = append(result.Advisories, AuditAdvisory{ - Package: pkg, - Severity: vuln.Severity, - }) - } - } else if err != nil { - result.Error = err - } - } - - return result -} - -// ============================================================================= -// Rector Automated Refactoring -// ============================================================================= - -// RectorOptions configures Rector code refactoring. -type RectorOptions struct { - Dir string - Fix bool // Apply changes (default is dry-run) - Diff bool // Show detailed diff - ClearCache bool // Clear cache before running - Output io.Writer -} - -// DetectRector checks if Rector is available in the project. -func DetectRector(dir string) bool { - // Check for rector.php config - rectorConfig := filepath.Join(dir, "rector.php") - if _, err := os.Stat(rectorConfig); err == nil { - return true - } - - // Check for vendor binary - rectorBin := filepath.Join(dir, "vendor", "bin", "rector") - if _, err := os.Stat(rectorBin); err == nil { - return true - } - - return false -} - -// RunRector runs Rector for automated code refactoring. 
-func RunRector(ctx context.Context, opts RectorOptions) error { - if opts.Dir == "" { - cwd, err := os.Getwd() - if err != nil { - return cli.WrapVerb(err, "get", "working directory") - } - opts.Dir = cwd - } - - if opts.Output == nil { - opts.Output = os.Stdout - } - - // Build command - vendorBin := filepath.Join(opts.Dir, "vendor", "bin", "rector") - cmdName := "rector" - if _, err := os.Stat(vendorBin); err == nil { - cmdName = vendorBin - } - - args := []string{"process"} - - if !opts.Fix { - args = append(args, "--dry-run") - } - - if opts.Diff { - args = append(args, "--output-format", "diff") - } - - if opts.ClearCache { - args = append(args, "--clear-cache") - } - - cmd := exec.CommandContext(ctx, cmdName, args...) - cmd.Dir = opts.Dir - cmd.Stdout = opts.Output - cmd.Stderr = opts.Output - - return cmd.Run() -} - -// ============================================================================= -// Infection Mutation Testing -// ============================================================================= - -// InfectionOptions configures Infection mutation testing. -type InfectionOptions struct { - Dir string - MinMSI int // Minimum mutation score indicator (0-100) - MinCoveredMSI int // Minimum covered mutation score (0-100) - Threads int // Number of parallel threads - Filter string // Filter files by pattern - OnlyCovered bool // Only mutate covered code - Output io.Writer -} - -// DetectInfection checks if Infection is available in the project. 
-func DetectInfection(dir string) bool { - // Check for infection config files - configs := []string{"infection.json", "infection.json5", "infection.json.dist"} - for _, config := range configs { - if _, err := os.Stat(filepath.Join(dir, config)); err == nil { - return true - } - } - - // Check for vendor binary - infectionBin := filepath.Join(dir, "vendor", "bin", "infection") - if _, err := os.Stat(infectionBin); err == nil { - return true - } - - return false -} - -// RunInfection runs Infection mutation testing. -func RunInfection(ctx context.Context, opts InfectionOptions) error { - if opts.Dir == "" { - cwd, err := os.Getwd() - if err != nil { - return cli.WrapVerb(err, "get", "working directory") - } - opts.Dir = cwd - } - - if opts.Output == nil { - opts.Output = os.Stdout - } - - // Build command - vendorBin := filepath.Join(opts.Dir, "vendor", "bin", "infection") - cmdName := "infection" - if _, err := os.Stat(vendorBin); err == nil { - cmdName = vendorBin - } - - var args []string - - // Set defaults - minMSI := opts.MinMSI - if minMSI == 0 { - minMSI = 50 - } - minCoveredMSI := opts.MinCoveredMSI - if minCoveredMSI == 0 { - minCoveredMSI = 70 - } - threads := opts.Threads - if threads == 0 { - threads = 4 - } - - args = append(args, cli.Sprintf("--min-msi=%d", minMSI)) - args = append(args, cli.Sprintf("--min-covered-msi=%d", minCoveredMSI)) - args = append(args, cli.Sprintf("--threads=%d", threads)) - - if opts.Filter != "" { - args = append(args, "--filter="+opts.Filter) - } - - if opts.OnlyCovered { - args = append(args, "--only-covered") - } - - cmd := exec.CommandContext(ctx, cmdName, args...) - cmd.Dir = opts.Dir - cmd.Stdout = opts.Output - cmd.Stderr = opts.Output - - return cmd.Run() -} - -// ============================================================================= -// QA Pipeline -// ============================================================================= - -// QAOptions configures the full QA pipeline. 
-type QAOptions struct { - Dir string - Quick bool // Only run quick checks - Full bool // Run all stages including slow checks - Fix bool // Auto-fix issues where possible - JSON bool // Output results as JSON -} - -// QAStage represents a stage in the QA pipeline. -type QAStage string - -const ( - QAStageQuick QAStage = "quick" - QAStageStandard QAStage = "standard" - QAStageFull QAStage = "full" -) - -// QACheckResult holds the result of a single QA check. -type QACheckResult struct { - Name string - Stage QAStage - Passed bool - Duration string - Error error - Output string -} - -// QAResult holds the results of the full QA pipeline. -type QAResult struct { - Stages []QAStage - Checks []QACheckResult - Passed bool - Summary string -} - -// GetQAStages returns the stages to run based on options. -func GetQAStages(opts QAOptions) []QAStage { - if opts.Quick { - return []QAStage{QAStageQuick} - } - if opts.Full { - return []QAStage{QAStageQuick, QAStageStandard, QAStageFull} - } - // Default: quick + standard - return []QAStage{QAStageQuick, QAStageStandard} -} - -// GetQAChecks returns the checks for a given stage. -func GetQAChecks(dir string, stage QAStage) []string { - switch stage { - case QAStageQuick: - checks := []string{"audit", "fmt", "stan"} - return checks - case QAStageStandard: - checks := []string{} - if _, found := DetectPsalm(dir); found { - checks = append(checks, "psalm") - } - checks = append(checks, "test") - return checks - case QAStageFull: - checks := []string{} - if DetectRector(dir) { - checks = append(checks, "rector") - } - if DetectInfection(dir) { - checks = append(checks, "infection") - } - return checks - } - return nil -} - -// ============================================================================= -// Security Checks -// ============================================================================= - -// SecurityOptions configures security scanning. 
-type SecurityOptions struct { - Dir string - Severity string // Minimum severity (critical, high, medium, low) - JSON bool // Output in JSON format - SARIF bool // Output in SARIF format - URL string // URL to check HTTP headers (optional) - Output io.Writer -} - -// SecurityResult holds the results of security scanning. -type SecurityResult struct { - Checks []SecurityCheck - Summary SecuritySummary -} - -// SecurityCheck represents a single security check result. -type SecurityCheck struct { - ID string - Name string - Description string - Severity string - Passed bool - Message string - Fix string - CWE string -} - -// SecuritySummary summarizes security check results. -type SecuritySummary struct { - Total int - Passed int - Critical int - High int - Medium int - Low int -} - -// RunSecurityChecks runs security checks on the project. -func RunSecurityChecks(ctx context.Context, opts SecurityOptions) (*SecurityResult, error) { - if opts.Dir == "" { - cwd, err := os.Getwd() - if err != nil { - return nil, cli.WrapVerb(err, "get", "working directory") - } - opts.Dir = cwd - } - - result := &SecurityResult{} - - // Run composer audit - auditResults, _ := RunAudit(ctx, AuditOptions{Dir: opts.Dir}) - for _, audit := range auditResults { - check := SecurityCheck{ - ID: audit.Tool + "_audit", - Name: strings.Title(audit.Tool) + " Security Audit", - Description: "Check " + audit.Tool + " dependencies for vulnerabilities", - Severity: "critical", - Passed: audit.Vulnerabilities == 0 && audit.Error == nil, - CWE: "CWE-1395", - } - if !check.Passed { - check.Message = cli.Sprintf("Found %d vulnerabilities", audit.Vulnerabilities) - } - result.Checks = append(result.Checks, check) - } - - // Check .env file for security issues - envChecks := runEnvSecurityChecks(opts.Dir) - result.Checks = append(result.Checks, envChecks...) - - // Check filesystem security - fsChecks := runFilesystemSecurityChecks(opts.Dir) - result.Checks = append(result.Checks, fsChecks...) 
- - // Calculate summary - for _, check := range result.Checks { - result.Summary.Total++ - if check.Passed { - result.Summary.Passed++ - } else { - switch check.Severity { - case "critical": - result.Summary.Critical++ - case "high": - result.Summary.High++ - case "medium": - result.Summary.Medium++ - case "low": - result.Summary.Low++ - } - } - } - - return result, nil -} - -func runEnvSecurityChecks(dir string) []SecurityCheck { - var checks []SecurityCheck - - envPath := filepath.Join(dir, ".env") - envContent, err := os.ReadFile(envPath) - if err != nil { - return checks - } - - envLines := strings.Split(string(envContent), "\n") - envMap := make(map[string]string) - for _, line := range envLines { - line = strings.TrimSpace(line) - if line == "" || strings.HasPrefix(line, "#") { - continue - } - parts := strings.SplitN(line, "=", 2) - if len(parts) == 2 { - envMap[parts[0]] = parts[1] - } - } - - // Check APP_DEBUG - if debug, ok := envMap["APP_DEBUG"]; ok { - check := SecurityCheck{ - ID: "debug_mode", - Name: "Debug Mode Disabled", - Description: "APP_DEBUG should be false in production", - Severity: "critical", - Passed: strings.ToLower(debug) != "true", - CWE: "CWE-215", - } - if !check.Passed { - check.Message = "Debug mode exposes sensitive information" - check.Fix = "Set APP_DEBUG=false in .env" - } - checks = append(checks, check) - } - - // Check APP_KEY - if key, ok := envMap["APP_KEY"]; ok { - check := SecurityCheck{ - ID: "app_key_set", - Name: "Application Key Set", - Description: "APP_KEY must be set and valid", - Severity: "critical", - Passed: len(key) >= 32, - CWE: "CWE-321", - } - if !check.Passed { - check.Message = "Missing or weak encryption key" - check.Fix = "Run: php artisan key:generate" - } - checks = append(checks, check) - } - - // Check APP_URL for HTTPS - if url, ok := envMap["APP_URL"]; ok { - check := SecurityCheck{ - ID: "https_enforced", - Name: "HTTPS Enforced", - Description: "APP_URL should use HTTPS in production", - 
Severity: "high", - Passed: strings.HasPrefix(url, "https://"), - CWE: "CWE-319", - } - if !check.Passed { - check.Message = "Application not using HTTPS" - check.Fix = "Update APP_URL to use https://" - } - checks = append(checks, check) - } - - return checks -} - -func runFilesystemSecurityChecks(dir string) []SecurityCheck { - var checks []SecurityCheck - - // Check .env not in public - publicEnvPaths := []string{"public/.env", "public_html/.env"} - for _, path := range publicEnvPaths { - fullPath := filepath.Join(dir, path) - if _, err := os.Stat(fullPath); err == nil { - checks = append(checks, SecurityCheck{ - ID: "env_not_public", - Name: ".env Not Publicly Accessible", - Description: ".env file should not be in public directory", - Severity: "critical", - Passed: false, - Message: "Environment file exposed to web at " + path, - CWE: "CWE-538", - }) - } - } - - // Check .git not in public - publicGitPaths := []string{"public/.git", "public_html/.git"} - for _, path := range publicGitPaths { - fullPath := filepath.Join(dir, path) - if _, err := os.Stat(fullPath); err == nil { - checks = append(checks, SecurityCheck{ - ID: "git_not_public", - Name: ".git Not Publicly Accessible", - Description: ".git directory should not be in public", - Severity: "critical", - Passed: false, - Message: "Git repository exposed to web (source code leak)", - CWE: "CWE-538", - }) - } - } - - return checks -} diff --git a/pkg/php/quality_extended_test.go b/pkg/php/quality_extended_test.go deleted file mode 100644 index 3841edc..0000000 --- a/pkg/php/quality_extended_test.go +++ /dev/null @@ -1,304 +0,0 @@ -package php - -import ( - "context" - "os" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestFormatOptions_Struct(t *testing.T) { - t.Run("all fields accessible", func(t *testing.T) { - opts := FormatOptions{ - Dir: "/project", - Fix: true, - Diff: true, - Paths: []string{"app", "tests"}, - Output: 
os.Stdout, - } - - assert.Equal(t, "/project", opts.Dir) - assert.True(t, opts.Fix) - assert.True(t, opts.Diff) - assert.Equal(t, []string{"app", "tests"}, opts.Paths) - assert.NotNil(t, opts.Output) - }) -} - -func TestAnalyseOptions_Struct(t *testing.T) { - t.Run("all fields accessible", func(t *testing.T) { - opts := AnalyseOptions{ - Dir: "/project", - Level: 5, - Paths: []string{"src"}, - Memory: "2G", - Output: os.Stdout, - } - - assert.Equal(t, "/project", opts.Dir) - assert.Equal(t, 5, opts.Level) - assert.Equal(t, []string{"src"}, opts.Paths) - assert.Equal(t, "2G", opts.Memory) - assert.NotNil(t, opts.Output) - }) -} - -func TestFormatterType_Constants(t *testing.T) { - t.Run("constants are defined", func(t *testing.T) { - assert.Equal(t, FormatterType("pint"), FormatterPint) - }) -} - -func TestAnalyserType_Constants(t *testing.T) { - t.Run("constants are defined", func(t *testing.T) { - assert.Equal(t, AnalyserType("phpstan"), AnalyserPHPStan) - assert.Equal(t, AnalyserType("larastan"), AnalyserLarastan) - }) -} - -func TestDetectFormatter_Extended(t *testing.T) { - t.Run("returns not found for empty directory", func(t *testing.T) { - dir := t.TempDir() - _, found := DetectFormatter(dir) - assert.False(t, found) - }) - - t.Run("prefers pint.json over vendor binary", func(t *testing.T) { - dir := t.TempDir() - - // Create pint.json - err := os.WriteFile(filepath.Join(dir, "pint.json"), []byte("{}"), 0644) - require.NoError(t, err) - - formatter, found := DetectFormatter(dir) - assert.True(t, found) - assert.Equal(t, FormatterPint, formatter) - }) -} - -func TestDetectAnalyser_Extended(t *testing.T) { - t.Run("returns not found for empty directory", func(t *testing.T) { - dir := t.TempDir() - _, found := DetectAnalyser(dir) - assert.False(t, found) - }) - - t.Run("detects phpstan from vendor binary alone", func(t *testing.T) { - dir := t.TempDir() - - // Create vendor binary - binDir := filepath.Join(dir, "vendor", "bin") - err := os.MkdirAll(binDir, 
0755) - require.NoError(t, err) - - err = os.WriteFile(filepath.Join(binDir, "phpstan"), []byte(""), 0755) - require.NoError(t, err) - - analyser, found := DetectAnalyser(dir) - assert.True(t, found) - assert.Equal(t, AnalyserPHPStan, analyser) - }) - - t.Run("detects larastan from larastan/larastan vendor path", func(t *testing.T) { - dir := t.TempDir() - - // Create phpstan.neon - err := os.WriteFile(filepath.Join(dir, "phpstan.neon"), []byte(""), 0644) - require.NoError(t, err) - - // Create larastan/larastan path - larastanPath := filepath.Join(dir, "vendor", "larastan", "larastan") - err = os.MkdirAll(larastanPath, 0755) - require.NoError(t, err) - - analyser, found := DetectAnalyser(dir) - assert.True(t, found) - assert.Equal(t, AnalyserLarastan, analyser) - }) - - t.Run("detects larastan from nunomaduro/larastan vendor path", func(t *testing.T) { - dir := t.TempDir() - - // Create phpstan.neon - err := os.WriteFile(filepath.Join(dir, "phpstan.neon"), []byte(""), 0644) - require.NoError(t, err) - - // Create nunomaduro/larastan path - larastanPath := filepath.Join(dir, "vendor", "nunomaduro", "larastan") - err = os.MkdirAll(larastanPath, 0755) - require.NoError(t, err) - - analyser, found := DetectAnalyser(dir) - assert.True(t, found) - assert.Equal(t, AnalyserLarastan, analyser) - }) -} - -func TestBuildPintCommand_Extended(t *testing.T) { - t.Run("uses global pint when no vendor binary", func(t *testing.T) { - dir := t.TempDir() - opts := FormatOptions{Dir: dir} - - cmd, _ := buildPintCommand(opts) - assert.Equal(t, "pint", cmd) - }) - - t.Run("adds test flag when Fix is false", func(t *testing.T) { - dir := t.TempDir() - opts := FormatOptions{Dir: dir, Fix: false} - - _, args := buildPintCommand(opts) - assert.Contains(t, args, "--test") - }) - - t.Run("does not add test flag when Fix is true", func(t *testing.T) { - dir := t.TempDir() - opts := FormatOptions{Dir: dir, Fix: true} - - _, args := buildPintCommand(opts) - assert.NotContains(t, args, "--test") 
- }) - - t.Run("adds diff flag", func(t *testing.T) { - dir := t.TempDir() - opts := FormatOptions{Dir: dir, Diff: true} - - _, args := buildPintCommand(opts) - assert.Contains(t, args, "--diff") - }) - - t.Run("adds paths", func(t *testing.T) { - dir := t.TempDir() - opts := FormatOptions{Dir: dir, Paths: []string{"app", "tests"}} - - _, args := buildPintCommand(opts) - assert.Contains(t, args, "app") - assert.Contains(t, args, "tests") - }) -} - -func TestBuildPHPStanCommand_Extended(t *testing.T) { - t.Run("uses global phpstan when no vendor binary", func(t *testing.T) { - dir := t.TempDir() - opts := AnalyseOptions{Dir: dir} - - cmd, _ := buildPHPStanCommand(opts) - assert.Equal(t, "phpstan", cmd) - }) - - t.Run("adds level flag", func(t *testing.T) { - dir := t.TempDir() - opts := AnalyseOptions{Dir: dir, Level: 8} - - _, args := buildPHPStanCommand(opts) - assert.Contains(t, args, "--level") - assert.Contains(t, args, "8") - }) - - t.Run("does not add level flag when zero", func(t *testing.T) { - dir := t.TempDir() - opts := AnalyseOptions{Dir: dir, Level: 0} - - _, args := buildPHPStanCommand(opts) - assert.NotContains(t, args, "--level") - }) - - t.Run("adds memory limit", func(t *testing.T) { - dir := t.TempDir() - opts := AnalyseOptions{Dir: dir, Memory: "4G"} - - _, args := buildPHPStanCommand(opts) - assert.Contains(t, args, "--memory-limit") - assert.Contains(t, args, "4G") - }) - - t.Run("does not add memory flag when empty", func(t *testing.T) { - dir := t.TempDir() - opts := AnalyseOptions{Dir: dir, Memory: ""} - - _, args := buildPHPStanCommand(opts) - assert.NotContains(t, args, "--memory-limit") - }) - - t.Run("adds paths", func(t *testing.T) { - dir := t.TempDir() - opts := AnalyseOptions{Dir: dir, Paths: []string{"src", "app"}} - - _, args := buildPHPStanCommand(opts) - assert.Contains(t, args, "src") - assert.Contains(t, args, "app") - }) -} - -func TestFormat_Bad(t *testing.T) { - t.Run("fails when no formatter found", func(t *testing.T) { - 
dir := t.TempDir() - opts := FormatOptions{Dir: dir} - - err := Format(nil, opts) - assert.Error(t, err) - assert.Contains(t, err.Error(), "no formatter found") - }) - - t.Run("uses cwd when dir not specified", func(t *testing.T) { - // When no formatter found in cwd, should still fail with "no formatter found" - opts := FormatOptions{Dir: ""} - - err := Format(nil, opts) - // May or may not find a formatter depending on cwd, but function should not panic - if err != nil { - // Expected - no formatter in cwd - assert.Contains(t, err.Error(), "no formatter") - } - }) - - t.Run("uses stdout when output not specified", func(t *testing.T) { - dir := t.TempDir() - // Create pint.json to enable formatter detection - err := os.WriteFile(filepath.Join(dir, "pint.json"), []byte("{}"), 0644) - require.NoError(t, err) - - opts := FormatOptions{Dir: dir, Output: nil} - - // Will fail because pint isn't actually installed, but tests the code path - err = Format(context.Background(), opts) - assert.Error(t, err) // Pint not installed - }) -} - -func TestAnalyse_Bad(t *testing.T) { - t.Run("fails when no analyser found", func(t *testing.T) { - dir := t.TempDir() - opts := AnalyseOptions{Dir: dir} - - err := Analyse(nil, opts) - assert.Error(t, err) - assert.Contains(t, err.Error(), "no static analyser found") - }) - - t.Run("uses cwd when dir not specified", func(t *testing.T) { - opts := AnalyseOptions{Dir: ""} - - err := Analyse(nil, opts) - // May or may not find an analyser depending on cwd - if err != nil { - assert.Contains(t, err.Error(), "no static analyser") - } - }) - - t.Run("uses stdout when output not specified", func(t *testing.T) { - dir := t.TempDir() - // Create phpstan.neon to enable analyser detection - err := os.WriteFile(filepath.Join(dir, "phpstan.neon"), []byte(""), 0644) - require.NoError(t, err) - - opts := AnalyseOptions{Dir: dir, Output: nil} - - // Will fail because phpstan isn't actually installed, but tests the code path - err = 
Analyse(context.Background(), opts) - assert.Error(t, err) // PHPStan not installed - }) -} diff --git a/pkg/php/quality_test.go b/pkg/php/quality_test.go deleted file mode 100644 index 710e3fa..0000000 --- a/pkg/php/quality_test.go +++ /dev/null @@ -1,517 +0,0 @@ -package php - -import ( - "os" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestDetectFormatter_Good(t *testing.T) { - t.Run("detects pint.json", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "pint.json"), []byte("{}"), 0644) - require.NoError(t, err) - - formatter, found := DetectFormatter(dir) - assert.True(t, found) - assert.Equal(t, FormatterPint, formatter) - }) - - t.Run("detects vendor binary", func(t *testing.T) { - dir := t.TempDir() - binDir := filepath.Join(dir, "vendor", "bin") - err := os.MkdirAll(binDir, 0755) - require.NoError(t, err) - err = os.WriteFile(filepath.Join(binDir, "pint"), []byte(""), 0755) - require.NoError(t, err) - - formatter, found := DetectFormatter(dir) - assert.True(t, found) - assert.Equal(t, FormatterPint, formatter) - }) -} - -func TestDetectFormatter_Bad(t *testing.T) { - t.Run("no formatter", func(t *testing.T) { - dir := t.TempDir() - _, found := DetectFormatter(dir) - assert.False(t, found) - }) -} - -func TestDetectAnalyser_Good(t *testing.T) { - t.Run("detects phpstan.neon", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "phpstan.neon"), []byte(""), 0644) - require.NoError(t, err) - - analyser, found := DetectAnalyser(dir) - assert.True(t, found) - assert.Equal(t, AnalyserPHPStan, analyser) - }) - - t.Run("detects phpstan.neon.dist", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "phpstan.neon.dist"), []byte(""), 0644) - require.NoError(t, err) - - analyser, found := DetectAnalyser(dir) - assert.True(t, found) - assert.Equal(t, AnalyserPHPStan, analyser) - }) - - t.Run("detects 
larastan", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "phpstan.neon"), []byte(""), 0644) - require.NoError(t, err) - - larastanDir := filepath.Join(dir, "vendor", "larastan", "larastan") - err = os.MkdirAll(larastanDir, 0755) - require.NoError(t, err) - - analyser, found := DetectAnalyser(dir) - assert.True(t, found) - assert.Equal(t, AnalyserLarastan, analyser) - }) - - t.Run("detects nunomaduro/larastan", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "phpstan.neon"), []byte(""), 0644) - require.NoError(t, err) - - larastanDir := filepath.Join(dir, "vendor", "nunomaduro", "larastan") - err = os.MkdirAll(larastanDir, 0755) - require.NoError(t, err) - - analyser, found := DetectAnalyser(dir) - assert.True(t, found) - assert.Equal(t, AnalyserLarastan, analyser) - }) -} - -func TestBuildPintCommand_Good(t *testing.T) { - t.Run("basic command", func(t *testing.T) { - dir := t.TempDir() - opts := FormatOptions{Dir: dir} - cmd, args := buildPintCommand(opts) - assert.Equal(t, "pint", cmd) - assert.Contains(t, args, "--test") - }) - - t.Run("fix enabled", func(t *testing.T) { - dir := t.TempDir() - opts := FormatOptions{Dir: dir, Fix: true} - _, args := buildPintCommand(opts) - assert.NotContains(t, args, "--test") - }) - - t.Run("diff enabled", func(t *testing.T) { - dir := t.TempDir() - opts := FormatOptions{Dir: dir, Diff: true} - _, args := buildPintCommand(opts) - assert.Contains(t, args, "--diff") - }) - - t.Run("with specific paths", func(t *testing.T) { - dir := t.TempDir() - paths := []string{"app", "tests"} - opts := FormatOptions{Dir: dir, Paths: paths} - _, args := buildPintCommand(opts) - assert.Equal(t, paths, args[len(args)-2:]) - }) - - t.Run("uses vendor binary if exists", func(t *testing.T) { - dir := t.TempDir() - binDir := filepath.Join(dir, "vendor", "bin") - err := os.MkdirAll(binDir, 0755) - require.NoError(t, err) - pintPath := filepath.Join(binDir, "pint") - err = 
os.WriteFile(pintPath, []byte(""), 0755) - require.NoError(t, err) - - opts := FormatOptions{Dir: dir} - cmd, _ := buildPintCommand(opts) - assert.Equal(t, pintPath, cmd) - }) -} - -func TestBuildPHPStanCommand_Good(t *testing.T) { - t.Run("basic command", func(t *testing.T) { - dir := t.TempDir() - opts := AnalyseOptions{Dir: dir} - cmd, args := buildPHPStanCommand(opts) - assert.Equal(t, "phpstan", cmd) - assert.Equal(t, []string{"analyse"}, args) - }) - - t.Run("with level", func(t *testing.T) { - dir := t.TempDir() - opts := AnalyseOptions{Dir: dir, Level: 5} - _, args := buildPHPStanCommand(opts) - assert.Contains(t, args, "--level") - assert.Contains(t, args, "5") - }) - - t.Run("with memory limit", func(t *testing.T) { - dir := t.TempDir() - opts := AnalyseOptions{Dir: dir, Memory: "2G"} - _, args := buildPHPStanCommand(opts) - assert.Contains(t, args, "--memory-limit") - assert.Contains(t, args, "2G") - }) - - t.Run("uses vendor binary if exists", func(t *testing.T) { - dir := t.TempDir() - binDir := filepath.Join(dir, "vendor", "bin") - err := os.MkdirAll(binDir, 0755) - require.NoError(t, err) - phpstanPath := filepath.Join(binDir, "phpstan") - err = os.WriteFile(phpstanPath, []byte(""), 0755) - require.NoError(t, err) - - opts := AnalyseOptions{Dir: dir} - cmd, _ := buildPHPStanCommand(opts) - assert.Equal(t, phpstanPath, cmd) - }) -} - -// ============================================================================= -// Psalm Detection Tests -// ============================================================================= - -func TestDetectPsalm_Good(t *testing.T) { - t.Run("detects psalm.xml", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "psalm.xml"), []byte(""), 0644) - require.NoError(t, err) - - // Also need vendor binary for it to return true - binDir := filepath.Join(dir, "vendor", "bin") - err = os.MkdirAll(binDir, 0755) - require.NoError(t, err) - err = os.WriteFile(filepath.Join(binDir, "psalm"), 
[]byte(""), 0755) - require.NoError(t, err) - - psalmType, found := DetectPsalm(dir) - assert.True(t, found) - assert.Equal(t, PsalmStandard, psalmType) - }) - - t.Run("detects psalm.xml.dist", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "psalm.xml.dist"), []byte(""), 0644) - require.NoError(t, err) - - binDir := filepath.Join(dir, "vendor", "bin") - err = os.MkdirAll(binDir, 0755) - require.NoError(t, err) - err = os.WriteFile(filepath.Join(binDir, "psalm"), []byte(""), 0755) - require.NoError(t, err) - - _, found := DetectPsalm(dir) - assert.True(t, found) - }) - - t.Run("detects vendor binary only", func(t *testing.T) { - dir := t.TempDir() - binDir := filepath.Join(dir, "vendor", "bin") - err := os.MkdirAll(binDir, 0755) - require.NoError(t, err) - err = os.WriteFile(filepath.Join(binDir, "psalm"), []byte(""), 0755) - require.NoError(t, err) - - _, found := DetectPsalm(dir) - assert.True(t, found) - }) -} - -func TestDetectPsalm_Bad(t *testing.T) { - t.Run("no psalm", func(t *testing.T) { - dir := t.TempDir() - _, found := DetectPsalm(dir) - assert.False(t, found) - }) -} - -// ============================================================================= -// Rector Detection Tests -// ============================================================================= - -func TestDetectRector_Good(t *testing.T) { - t.Run("detects rector.php", func(t *testing.T) { - dir := t.TempDir() - err := os.WriteFile(filepath.Join(dir, "rector.php"), []byte("", - Short: i18n.T("cmd.pkg.install.short"), - Long: i18n.T("cmd.pkg.install.long"), - RunE: func(cmd *cobra.Command, args []string) error { - if len(args) == 0 { - return errors.New(i18n.T("cmd.pkg.error.repo_required")) - } - return runPkgInstall(args[0], installTargetDir, installAddToReg) - }, - } - - installCmd.Flags().StringVar(&installTargetDir, "dir", "", i18n.T("cmd.pkg.install.flag.dir")) - installCmd.Flags().BoolVar(&installAddToReg, "add", false, 
i18n.T("cmd.pkg.install.flag.add")) - - parent.AddCommand(installCmd) -} - -func runPkgInstall(repoArg, targetDir string, addToRegistry bool) error { - ctx := context.Background() - - // Parse org/repo - parts := strings.Split(repoArg, "/") - if len(parts) != 2 { - return errors.New(i18n.T("cmd.pkg.error.invalid_repo_format")) - } - org, repoName := parts[0], parts[1] - - // Determine target directory - if targetDir == "" { - if regPath, err := repos.FindRegistry(); err == nil { - if reg, err := repos.LoadRegistry(regPath); err == nil { - targetDir = reg.BasePath - if targetDir == "" { - targetDir = "./packages" - } - if !filepath.IsAbs(targetDir) { - targetDir = filepath.Join(filepath.Dir(regPath), targetDir) - } - } - } - if targetDir == "" { - targetDir = "." - } - } - - if strings.HasPrefix(targetDir, "~/") { - home, _ := os.UserHomeDir() - targetDir = filepath.Join(home, targetDir[2:]) - } - - repoPath := filepath.Join(targetDir, repoName) - - if _, err := os.Stat(filepath.Join(repoPath, ".git")); err == nil { - fmt.Printf("%s %s\n", dimStyle.Render(i18n.Label("skip")), i18n.T("cmd.pkg.install.already_exists", map[string]string{"Name": repoName, "Path": repoPath})) - return nil - } - - if err := os.MkdirAll(targetDir, 0755); err != nil { - return fmt.Errorf("%s: %w", i18n.T("i18n.fail.create", "directory"), err) - } - - fmt.Printf("%s %s/%s\n", dimStyle.Render(i18n.T("cmd.pkg.install.installing_label")), org, repoName) - fmt.Printf("%s %s\n", dimStyle.Render(i18n.Label("target")), repoPath) - fmt.Println() - - fmt.Printf(" %s... 
", dimStyle.Render(i18n.T("common.status.cloning"))) - err := gitClone(ctx, org, repoName, repoPath) - if err != nil { - fmt.Printf("%s\n", errorStyle.Render("✗ "+err.Error())) - return err - } - fmt.Printf("%s\n", successStyle.Render("✓")) - - if addToRegistry { - if err := addToRegistryFile(org, repoName); err != nil { - fmt.Printf(" %s %s: %s\n", errorStyle.Render("✗"), i18n.T("cmd.pkg.install.add_to_registry"), err) - } else { - fmt.Printf(" %s %s\n", successStyle.Render("✓"), i18n.T("cmd.pkg.install.added_to_registry")) - } - } - - fmt.Println() - fmt.Printf("%s %s\n", successStyle.Render(i18n.T("i18n.done.install")), i18n.T("cmd.pkg.install.installed", map[string]string{"Name": repoName})) - - return nil -} - -func addToRegistryFile(org, repoName string) error { - regPath, err := repos.FindRegistry() - if err != nil { - return errors.New(i18n.T("cmd.pkg.error.no_repos_yaml")) - } - - reg, err := repos.LoadRegistry(regPath) - if err != nil { - return err - } - - if _, exists := reg.Get(repoName); exists { - return nil - } - - f, err := os.OpenFile(regPath, os.O_APPEND|os.O_WRONLY, 0644) - if err != nil { - return err - } - defer f.Close() - - repoType := detectRepoType(repoName) - entry := fmt.Sprintf("\n %s:\n type: %s\n description: (installed via core pkg install)\n", - repoName, repoType) - - _, err = f.WriteString(entry) - return err -} - -func detectRepoType(name string) string { - lower := strings.ToLower(name) - if strings.Contains(lower, "-mod-") || strings.HasSuffix(lower, "-mod") { - return "module" - } - if strings.Contains(lower, "-plug-") || strings.HasSuffix(lower, "-plug") { - return "plugin" - } - if strings.Contains(lower, "-services-") || strings.HasSuffix(lower, "-services") { - return "service" - } - if strings.Contains(lower, "-website-") || strings.HasSuffix(lower, "-website") { - return "website" - } - if strings.HasPrefix(lower, "core-") { - return "package" - } - return "package" -} diff --git a/pkg/pkgcmd/cmd_manage.go 
b/pkg/pkgcmd/cmd_manage.go deleted file mode 100644 index d7f1bb9..0000000 --- a/pkg/pkgcmd/cmd_manage.go +++ /dev/null @@ -1,257 +0,0 @@ -package pkgcmd - -import ( - "errors" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/repos" - "github.com/spf13/cobra" -) - -// addPkgListCommand adds the 'pkg list' command. -func addPkgListCommand(parent *cobra.Command) { - listCmd := &cobra.Command{ - Use: "list", - Short: i18n.T("cmd.pkg.list.short"), - Long: i18n.T("cmd.pkg.list.long"), - RunE: func(cmd *cobra.Command, args []string) error { - return runPkgList() - }, - } - - parent.AddCommand(listCmd) -} - -func runPkgList() error { - regPath, err := repos.FindRegistry() - if err != nil { - return errors.New(i18n.T("cmd.pkg.error.no_repos_yaml_workspace")) - } - - reg, err := repos.LoadRegistry(regPath) - if err != nil { - return fmt.Errorf("%s: %w", i18n.T("i18n.fail.load", "registry"), err) - } - - basePath := reg.BasePath - if basePath == "" { - basePath = "." - } - if !filepath.IsAbs(basePath) { - basePath = filepath.Join(filepath.Dir(regPath), basePath) - } - - allRepos := reg.List() - if len(allRepos) == 0 { - fmt.Println(i18n.T("cmd.pkg.list.no_packages")) - return nil - } - - fmt.Printf("%s\n\n", repoNameStyle.Render(i18n.T("cmd.pkg.list.title"))) - - var installed, missing int - for _, r := range allRepos { - repoPath := filepath.Join(basePath, r.Name) - exists := false - if _, err := os.Stat(filepath.Join(repoPath, ".git")); err == nil { - exists = true - installed++ - } else { - missing++ - } - - status := successStyle.Render("✓") - if !exists { - status = dimStyle.Render("○") - } - - desc := r.Description - if len(desc) > 40 { - desc = desc[:37] + "..." 
- } - if desc == "" { - desc = dimStyle.Render(i18n.T("cmd.pkg.no_description")) - } - - fmt.Printf(" %s %s\n", status, repoNameStyle.Render(r.Name)) - fmt.Printf(" %s\n", desc) - } - - fmt.Println() - fmt.Printf("%s %s\n", dimStyle.Render(i18n.Label("total")), i18n.T("cmd.pkg.list.summary", map[string]int{"Installed": installed, "Missing": missing})) - - if missing > 0 { - fmt.Printf("\n%s %s\n", i18n.T("cmd.pkg.list.install_missing"), dimStyle.Render("core setup")) - } - - return nil -} - -var updateAll bool - -// addPkgUpdateCommand adds the 'pkg update' command. -func addPkgUpdateCommand(parent *cobra.Command) { - updateCmd := &cobra.Command{ - Use: "update [packages...]", - Short: i18n.T("cmd.pkg.update.short"), - Long: i18n.T("cmd.pkg.update.long"), - RunE: func(cmd *cobra.Command, args []string) error { - if !updateAll && len(args) == 0 { - return errors.New(i18n.T("cmd.pkg.error.specify_package")) - } - return runPkgUpdate(args, updateAll) - }, - } - - updateCmd.Flags().BoolVar(&updateAll, "all", false, i18n.T("cmd.pkg.update.flag.all")) - - parent.AddCommand(updateCmd) -} - -func runPkgUpdate(packages []string, all bool) error { - regPath, err := repos.FindRegistry() - if err != nil { - return errors.New(i18n.T("cmd.pkg.error.no_repos_yaml")) - } - - reg, err := repos.LoadRegistry(regPath) - if err != nil { - return fmt.Errorf("%s: %w", i18n.T("i18n.fail.load", "registry"), err) - } - - basePath := reg.BasePath - if basePath == "" { - basePath = "." 
- } - if !filepath.IsAbs(basePath) { - basePath = filepath.Join(filepath.Dir(regPath), basePath) - } - - var toUpdate []string - if all { - for _, r := range reg.List() { - toUpdate = append(toUpdate, r.Name) - } - } else { - toUpdate = packages - } - - fmt.Printf("%s %s\n\n", dimStyle.Render(i18n.T("cmd.pkg.update.update_label")), i18n.T("cmd.pkg.update.updating", map[string]int{"Count": len(toUpdate)})) - - var updated, skipped, failed int - for _, name := range toUpdate { - repoPath := filepath.Join(basePath, name) - - if _, err := os.Stat(filepath.Join(repoPath, ".git")); os.IsNotExist(err) { - fmt.Printf(" %s %s (%s)\n", dimStyle.Render("○"), name, i18n.T("cmd.pkg.update.not_installed")) - skipped++ - continue - } - - fmt.Printf(" %s %s... ", dimStyle.Render("↓"), name) - - cmd := exec.Command("git", "-C", repoPath, "pull", "--ff-only") - output, err := cmd.CombinedOutput() - if err != nil { - fmt.Printf("%s\n", errorStyle.Render("✗")) - fmt.Printf(" %s\n", strings.TrimSpace(string(output))) - failed++ - continue - } - - if strings.Contains(string(output), "Already up to date") { - fmt.Printf("%s\n", dimStyle.Render(i18n.T("common.status.up_to_date"))) - } else { - fmt.Printf("%s\n", successStyle.Render("✓")) - } - updated++ - } - - fmt.Println() - fmt.Printf("%s %s\n", - dimStyle.Render(i18n.T("i18n.done.update")), i18n.T("cmd.pkg.update.summary", map[string]int{"Updated": updated, "Skipped": skipped, "Failed": failed})) - - return nil -} - -// addPkgOutdatedCommand adds the 'pkg outdated' command. 
-func addPkgOutdatedCommand(parent *cobra.Command) { - outdatedCmd := &cobra.Command{ - Use: "outdated", - Short: i18n.T("cmd.pkg.outdated.short"), - Long: i18n.T("cmd.pkg.outdated.long"), - RunE: func(cmd *cobra.Command, args []string) error { - return runPkgOutdated() - }, - } - - parent.AddCommand(outdatedCmd) -} - -func runPkgOutdated() error { - regPath, err := repos.FindRegistry() - if err != nil { - return errors.New(i18n.T("cmd.pkg.error.no_repos_yaml")) - } - - reg, err := repos.LoadRegistry(regPath) - if err != nil { - return fmt.Errorf("%s: %w", i18n.T("i18n.fail.load", "registry"), err) - } - - basePath := reg.BasePath - if basePath == "" { - basePath = "." - } - if !filepath.IsAbs(basePath) { - basePath = filepath.Join(filepath.Dir(regPath), basePath) - } - - fmt.Printf("%s %s\n\n", dimStyle.Render(i18n.T("cmd.pkg.outdated.outdated_label")), i18n.T("common.progress.checking_updates")) - - var outdated, upToDate, notInstalled int - - for _, r := range reg.List() { - repoPath := filepath.Join(basePath, r.Name) - - if _, err := os.Stat(filepath.Join(repoPath, ".git")); os.IsNotExist(err) { - notInstalled++ - continue - } - - // Fetch updates - exec.Command("git", "-C", repoPath, "fetch", "--quiet").Run() - - // Check if behind - cmd := exec.Command("git", "-C", repoPath, "rev-list", "--count", "HEAD..@{u}") - output, err := cmd.Output() - if err != nil { - continue - } - - count := strings.TrimSpace(string(output)) - if count != "0" { - fmt.Printf(" %s %s (%s)\n", - errorStyle.Render("↓"), repoNameStyle.Render(r.Name), i18n.T("cmd.pkg.outdated.commits_behind", map[string]string{"Count": count})) - outdated++ - } else { - upToDate++ - } - } - - fmt.Println() - if outdated == 0 { - fmt.Printf("%s %s\n", successStyle.Render(i18n.T("i18n.done.update")), i18n.T("cmd.pkg.outdated.all_up_to_date")) - } else { - fmt.Printf("%s %s\n", - dimStyle.Render(i18n.Label("summary")), i18n.T("cmd.pkg.outdated.summary", map[string]int{"Outdated": outdated, "UpToDate": 
upToDate})) - fmt.Printf("\n%s %s\n", i18n.T("cmd.pkg.outdated.update_with"), dimStyle.Render("core pkg update --all")) - } - - return nil -} diff --git a/pkg/pkgcmd/cmd_pkg.go b/pkg/pkgcmd/cmd_pkg.go deleted file mode 100644 index baf2967..0000000 --- a/pkg/pkgcmd/cmd_pkg.go +++ /dev/null @@ -1,38 +0,0 @@ -// Package pkgcmd provides package management commands for core-* repos. -package pkgcmd - -import ( - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/spf13/cobra" -) - -func init() { - cli.RegisterCommands(AddPkgCommands) -} - -// Style and utility aliases -var ( - repoNameStyle = cli.RepoStyle - successStyle = cli.SuccessStyle - errorStyle = cli.ErrorStyle - dimStyle = cli.DimStyle - ghAuthenticated = cli.GhAuthenticated - gitClone = cli.GitClone -) - -// AddPkgCommands adds the 'pkg' command and subcommands for package management. -func AddPkgCommands(root *cobra.Command) { - pkgCmd := &cobra.Command{ - Use: "pkg", - Short: i18n.T("cmd.pkg.short"), - Long: i18n.T("cmd.pkg.long"), - } - - root.AddCommand(pkgCmd) - addPkgSearchCommand(pkgCmd) - addPkgInstallCommand(pkgCmd) - addPkgListCommand(pkgCmd) - addPkgUpdateCommand(pkgCmd) - addPkgOutdatedCommand(pkgCmd) -} diff --git a/pkg/pkgcmd/cmd_search.go b/pkg/pkgcmd/cmd_search.go deleted file mode 100644 index c672ca7..0000000 --- a/pkg/pkgcmd/cmd_search.go +++ /dev/null @@ -1,204 +0,0 @@ -package pkgcmd - -import ( - "encoding/json" - "errors" - "fmt" - "os" - "os/exec" - "path/filepath" - "sort" - "strings" - "time" - - "github.com/host-uk/core/pkg/cache" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/repos" - "github.com/spf13/cobra" -) - -var ( - searchOrg string - searchPattern string - searchType string - searchLimit int - searchRefresh bool -) - -// addPkgSearchCommand adds the 'pkg search' command. 
-func addPkgSearchCommand(parent *cobra.Command) { - searchCmd := &cobra.Command{ - Use: "search", - Short: i18n.T("cmd.pkg.search.short"), - Long: i18n.T("cmd.pkg.search.long"), - RunE: func(cmd *cobra.Command, args []string) error { - org := searchOrg - pattern := searchPattern - limit := searchLimit - if org == "" { - org = "host-uk" - } - if pattern == "" { - pattern = "*" - } - if limit == 0 { - limit = 50 - } - return runPkgSearch(org, pattern, searchType, limit, searchRefresh) - }, - } - - searchCmd.Flags().StringVar(&searchOrg, "org", "", i18n.T("cmd.pkg.search.flag.org")) - searchCmd.Flags().StringVar(&searchPattern, "pattern", "", i18n.T("cmd.pkg.search.flag.pattern")) - searchCmd.Flags().StringVar(&searchType, "type", "", i18n.T("cmd.pkg.search.flag.type")) - searchCmd.Flags().IntVar(&searchLimit, "limit", 0, i18n.T("cmd.pkg.search.flag.limit")) - searchCmd.Flags().BoolVar(&searchRefresh, "refresh", false, i18n.T("cmd.pkg.search.flag.refresh")) - - parent.AddCommand(searchCmd) -} - -type ghRepo struct { - Name string `json:"name"` - FullName string `json:"full_name"` - Description string `json:"description"` - Visibility string `json:"visibility"` - UpdatedAt string `json:"updated_at"` - Language string `json:"language"` -} - -func runPkgSearch(org, pattern, repoType string, limit int, refresh bool) error { - // Initialize cache in workspace .core/ directory - var cacheDir string - if regPath, err := repos.FindRegistry(); err == nil { - cacheDir = filepath.Join(filepath.Dir(regPath), ".core", "cache") - } - - c, err := cache.New(cacheDir, 0) - if err != nil { - c = nil - } - - cacheKey := cache.GitHubReposKey(org) - var ghRepos []ghRepo - var fromCache bool - - // Try cache first (unless refresh requested) - if c != nil && !refresh { - if found, err := c.Get(cacheKey, &ghRepos); found && err == nil { - fromCache = true - age := c.Age(cacheKey) - fmt.Printf("%s %s %s\n", dimStyle.Render(i18n.T("cmd.pkg.search.cache_label")), org, 
dimStyle.Render(fmt.Sprintf("(%s ago)", age.Round(time.Second)))) - } - } - - // Fetch from GitHub if not cached - if !fromCache { - if !ghAuthenticated() { - return errors.New(i18n.T("cmd.pkg.error.gh_not_authenticated")) - } - - if os.Getenv("GH_TOKEN") != "" { - fmt.Printf("%s %s\n", dimStyle.Render(i18n.Label("note")), i18n.T("cmd.pkg.search.gh_token_warning")) - fmt.Printf("%s %s\n\n", dimStyle.Render(""), i18n.T("cmd.pkg.search.gh_token_unset")) - } - - fmt.Printf("%s %s... ", dimStyle.Render(i18n.T("cmd.pkg.search.fetching_label")), org) - - cmd := exec.Command("gh", "repo", "list", org, - "--json", "name,description,visibility,updatedAt,primaryLanguage", - "--limit", fmt.Sprintf("%d", limit)) - output, err := cmd.CombinedOutput() - - if err != nil { - fmt.Println() - errStr := strings.TrimSpace(string(output)) - if strings.Contains(errStr, "401") || strings.Contains(errStr, "Bad credentials") { - return errors.New(i18n.T("cmd.pkg.error.auth_failed")) - } - return fmt.Errorf("%s: %s", i18n.T("cmd.pkg.error.search_failed"), errStr) - } - - if err := json.Unmarshal(output, &ghRepos); err != nil { - return fmt.Errorf("%s: %w", i18n.T("i18n.fail.parse", "results"), err) - } - - if c != nil { - _ = c.Set(cacheKey, ghRepos) - } - - fmt.Printf("%s\n", successStyle.Render("✓")) - } - - // Filter by glob pattern and type - var filtered []ghRepo - for _, r := range ghRepos { - if !matchGlob(pattern, r.Name) { - continue - } - if repoType != "" && !strings.Contains(r.Name, repoType) { - continue - } - filtered = append(filtered, r) - } - - if len(filtered) == 0 { - fmt.Println(i18n.T("cmd.pkg.search.no_repos_found")) - return nil - } - - sort.Slice(filtered, func(i, j int) bool { - return filtered[i].Name < filtered[j].Name - }) - - fmt.Print(i18n.T("cmd.pkg.search.found_repos", map[string]int{"Count": len(filtered)}) + "\n\n") - - for _, r := range filtered { - visibility := "" - if r.Visibility == "private" { - visibility = dimStyle.Render(" " + 
i18n.T("cmd.pkg.search.private_label")) - } - - desc := r.Description - if len(desc) > 50 { - desc = desc[:47] + "..." - } - if desc == "" { - desc = dimStyle.Render(i18n.T("cmd.pkg.no_description")) - } - - fmt.Printf(" %s%s\n", repoNameStyle.Render(r.Name), visibility) - fmt.Printf(" %s\n", desc) - } - - fmt.Println() - fmt.Printf("%s %s\n", i18n.T("common.hint.install_with"), dimStyle.Render(fmt.Sprintf("core pkg install %s/", org))) - - return nil -} - -// matchGlob does simple glob matching with * wildcards -func matchGlob(pattern, name string) bool { - if pattern == "*" || pattern == "" { - return true - } - - parts := strings.Split(pattern, "*") - pos := 0 - for i, part := range parts { - if part == "" { - continue - } - idx := strings.Index(name[pos:], part) - if idx == -1 { - return false - } - if i == 0 && !strings.HasPrefix(pattern, "*") && idx != 0 { - return false - } - pos += idx + len(part) - } - if !strings.HasSuffix(pattern, "*") && pos != len(name) { - return false - } - return true -} diff --git a/pkg/process/actions.go b/pkg/process/actions.go deleted file mode 100644 index 7f33cf8..0000000 --- a/pkg/process/actions.go +++ /dev/null @@ -1,37 +0,0 @@ -package process - -import "time" - -// --- ACTION messages (broadcast via Core.ACTION) --- - -// ActionProcessStarted is broadcast when a process begins execution. -type ActionProcessStarted struct { - ID string - Command string - Args []string - Dir string - PID int -} - -// ActionProcessOutput is broadcast for each line of output. -// Subscribe to this for real-time streaming. -type ActionProcessOutput struct { - ID string - Line string - Stream Stream -} - -// ActionProcessExited is broadcast when a process completes. -// Check ExitCode for success (0) or failure. -type ActionProcessExited struct { - ID string - ExitCode int - Duration time.Duration - Error error // Non-nil if failed to start or was killed -} - -// ActionProcessKilled is broadcast when a process is terminated. 
-type ActionProcessKilled struct { - ID string - Signal string -} diff --git a/pkg/process/buffer.go b/pkg/process/buffer.go deleted file mode 100644 index bf02f59..0000000 --- a/pkg/process/buffer.go +++ /dev/null @@ -1,108 +0,0 @@ -package process - -import "sync" - -// RingBuffer is a fixed-size circular buffer that overwrites old data. -// Thread-safe for concurrent reads and writes. -type RingBuffer struct { - data []byte - size int - start int - end int - full bool - mu sync.RWMutex -} - -// NewRingBuffer creates a ring buffer with the given capacity. -func NewRingBuffer(size int) *RingBuffer { - return &RingBuffer{ - data: make([]byte, size), - size: size, - } -} - -// Write appends data to the buffer, overwriting oldest data if full. -func (rb *RingBuffer) Write(p []byte) (n int, err error) { - rb.mu.Lock() - defer rb.mu.Unlock() - - for _, b := range p { - rb.data[rb.end] = b - rb.end = (rb.end + 1) % rb.size - if rb.full { - rb.start = (rb.start + 1) % rb.size - } - if rb.end == rb.start { - rb.full = true - } - } - return len(p), nil -} - -// String returns the buffer contents as a string. -func (rb *RingBuffer) String() string { - rb.mu.RLock() - defer rb.mu.RUnlock() - - if !rb.full && rb.start == rb.end { - return "" - } - - if rb.full { - result := make([]byte, rb.size) - copy(result, rb.data[rb.start:]) - copy(result[rb.size-rb.start:], rb.data[:rb.end]) - return string(result) - } - - return string(rb.data[rb.start:rb.end]) -} - -// Bytes returns a copy of the buffer contents. -func (rb *RingBuffer) Bytes() []byte { - rb.mu.RLock() - defer rb.mu.RUnlock() - - if !rb.full && rb.start == rb.end { - return nil - } - - if rb.full { - result := make([]byte, rb.size) - copy(result, rb.data[rb.start:]) - copy(result[rb.size-rb.start:], rb.data[:rb.end]) - return result - } - - result := make([]byte, rb.end-rb.start) - copy(result, rb.data[rb.start:rb.end]) - return result -} - -// Len returns the current length of data in the buffer. 
-func (rb *RingBuffer) Len() int { - rb.mu.RLock() - defer rb.mu.RUnlock() - - if rb.full { - return rb.size - } - if rb.end >= rb.start { - return rb.end - rb.start - } - return rb.size - rb.start + rb.end -} - -// Cap returns the buffer capacity. -func (rb *RingBuffer) Cap() int { - return rb.size -} - -// Reset clears the buffer. -func (rb *RingBuffer) Reset() { - rb.mu.Lock() - defer rb.mu.Unlock() - rb.start = 0 - rb.end = 0 - rb.full = false -} diff --git a/pkg/process/buffer_test.go b/pkg/process/buffer_test.go deleted file mode 100644 index ee07ebc..0000000 --- a/pkg/process/buffer_test.go +++ /dev/null @@ -1,72 +0,0 @@ -package process - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestRingBuffer(t *testing.T) { - t.Run("write and read", func(t *testing.T) { - rb := NewRingBuffer(10) - - n, err := rb.Write([]byte("hello")) - assert.NoError(t, err) - assert.Equal(t, 5, n) - assert.Equal(t, "hello", rb.String()) - assert.Equal(t, 5, rb.Len()) - }) - - t.Run("overflow wraps around", func(t *testing.T) { - rb := NewRingBuffer(5) - - rb.Write([]byte("hello")) - assert.Equal(t, "hello", rb.String()) - - rb.Write([]byte("world")) - // Should contain "world" (overwrote "hello") - assert.Equal(t, 5, rb.Len()) - assert.Equal(t, "world", rb.String()) - }) - - t.Run("partial overflow", func(t *testing.T) { - rb := NewRingBuffer(10) - - rb.Write([]byte("hello")) - rb.Write([]byte("worldx")) - // Should contain "lloworldx" (11 chars, buffer is 10) - assert.Equal(t, 10, rb.Len()) - }) - - t.Run("empty buffer", func(t *testing.T) { - rb := NewRingBuffer(10) - assert.Equal(t, "", rb.String()) - assert.Equal(t, 0, rb.Len()) - assert.Nil(t, rb.Bytes()) - }) - - t.Run("reset", func(t *testing.T) { - rb := NewRingBuffer(10) - rb.Write([]byte("hello")) - rb.Reset() - assert.Equal(t, "", rb.String()) - assert.Equal(t, 0, rb.Len()) - }) - - t.Run("cap", func(t *testing.T) { - rb := NewRingBuffer(42) - assert.Equal(t, 42, rb.Cap()) - }) - - 
t.Run("bytes returns copy", func(t *testing.T) { - rb := NewRingBuffer(10) - rb.Write([]byte("hello")) - - bytes := rb.Bytes() - assert.Equal(t, []byte("hello"), bytes) - - // Modifying returned bytes shouldn't affect buffer - bytes[0] = 'x' - assert.Equal(t, "hello", rb.String()) - }) -} diff --git a/pkg/process/global_test.go b/pkg/process/global_test.go deleted file mode 100644 index c1965f7..0000000 --- a/pkg/process/global_test.go +++ /dev/null @@ -1,298 +0,0 @@ -package process - -import ( - "context" - "sync" - "testing" - - "github.com/host-uk/core/pkg/framework" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestGlobal_DefaultNotInitialized(t *testing.T) { - // Reset global state for this test - old := defaultService.Swap(nil) - defer func() { - if old != nil { - defaultService.Store(old) - } - }() - - assert.Nil(t, Default()) - - _, err := Start(context.Background(), "echo", "test") - assert.ErrorIs(t, err, ErrServiceNotInitialized) - - _, err = Run(context.Background(), "echo", "test") - assert.ErrorIs(t, err, ErrServiceNotInitialized) - - _, err = Get("proc-1") - assert.ErrorIs(t, err, ErrServiceNotInitialized) - - assert.Nil(t, List()) - assert.Nil(t, Running()) - - err = Kill("proc-1") - assert.ErrorIs(t, err, ErrServiceNotInitialized) - - _, err = StartWithOptions(context.Background(), RunOptions{Command: "echo"}) - assert.ErrorIs(t, err, ErrServiceNotInitialized) - - _, err = RunWithOptions(context.Background(), RunOptions{Command: "echo"}) - assert.ErrorIs(t, err, ErrServiceNotInitialized) -} - -func TestGlobal_SetDefault(t *testing.T) { - t.Run("sets and retrieves service", func(t *testing.T) { - // Reset global state - old := defaultService.Swap(nil) - defer func() { - if old != nil { - defaultService.Store(old) - } - }() - - core, err := framework.New( - framework.WithName("process", NewService(Options{})), - ) - require.NoError(t, err) - - svc, err := framework.ServiceFor[*Service](core, "process") - 
require.NoError(t, err) - - SetDefault(svc) - assert.Equal(t, svc, Default()) - }) - - t.Run("panics on nil", func(t *testing.T) { - assert.Panics(t, func() { - SetDefault(nil) - }) - }) -} - -func TestGlobal_ConcurrentDefault(t *testing.T) { - // Reset global state - old := defaultService.Swap(nil) - defer func() { - if old != nil { - defaultService.Store(old) - } - }() - - core, err := framework.New( - framework.WithName("process", NewService(Options{})), - ) - require.NoError(t, err) - - svc, err := framework.ServiceFor[*Service](core, "process") - require.NoError(t, err) - - SetDefault(svc) - - // Concurrent reads of Default() - var wg sync.WaitGroup - for i := 0; i < 100; i++ { - wg.Add(1) - go func() { - defer wg.Done() - s := Default() - assert.NotNil(t, s) - assert.Equal(t, svc, s) - }() - } - wg.Wait() -} - -func TestGlobal_ConcurrentSetDefault(t *testing.T) { - // Reset global state - old := defaultService.Swap(nil) - defer func() { - if old != nil { - defaultService.Store(old) - } - }() - - // Create multiple services - var services []*Service - for i := 0; i < 10; i++ { - core, err := framework.New( - framework.WithName("process", NewService(Options{})), - ) - require.NoError(t, err) - - svc, err := framework.ServiceFor[*Service](core, "process") - require.NoError(t, err) - services = append(services, svc) - } - - // Concurrent SetDefault calls - should not panic or race - var wg sync.WaitGroup - for _, svc := range services { - wg.Add(1) - go func(s *Service) { - defer wg.Done() - SetDefault(s) - }(svc) - } - wg.Wait() - - // Final state should be one of the services - final := Default() - assert.NotNil(t, final) - - found := false - for _, svc := range services { - if svc == final { - found = true - break - } - } - assert.True(t, found, "Default should be one of the set services") -} - -func TestGlobal_ConcurrentOperations(t *testing.T) { - // Reset global state - old := defaultService.Swap(nil) - defer func() { - if old != nil { - 
defaultService.Store(old) - } - }() - - core, err := framework.New( - framework.WithName("process", NewService(Options{})), - ) - require.NoError(t, err) - - svc, err := framework.ServiceFor[*Service](core, "process") - require.NoError(t, err) - - SetDefault(svc) - - // Concurrent Start, List, Get operations - var wg sync.WaitGroup - var processes []*Process - var procMu sync.Mutex - - // Start 20 processes concurrently - for i := 0; i < 20; i++ { - wg.Add(1) - go func() { - defer wg.Done() - proc, err := Start(context.Background(), "echo", "concurrent") - if err == nil { - procMu.Lock() - processes = append(processes, proc) - procMu.Unlock() - } - }() - } - - // Concurrent List calls while starting - for i := 0; i < 10; i++ { - wg.Add(1) - go func() { - defer wg.Done() - _ = List() - _ = Running() - }() - } - - wg.Wait() - - // Wait for all processes to complete - procMu.Lock() - for _, p := range processes { - <-p.Done() - } - procMu.Unlock() - - // All should have succeeded - assert.Len(t, processes, 20) - - // Concurrent Get calls - var wg2 sync.WaitGroup - for _, p := range processes { - wg2.Add(1) - go func(id string) { - defer wg2.Done() - got, err := Get(id) - assert.NoError(t, err) - assert.NotNil(t, got) - }(p.ID) - } - wg2.Wait() -} - -func TestGlobal_StartWithOptions(t *testing.T) { - svc, _ := newTestService(t) - - // Set as default - old := defaultService.Swap(svc) - defer func() { - if old != nil { - defaultService.Store(old) - } - }() - - proc, err := StartWithOptions(context.Background(), RunOptions{ - Command: "echo", - Args: []string{"with", "options"}, - }) - require.NoError(t, err) - - <-proc.Done() - - assert.Equal(t, 0, proc.ExitCode) - assert.Contains(t, proc.Output(), "with options") -} - -func TestGlobal_RunWithOptions(t *testing.T) { - svc, _ := newTestService(t) - - // Set as default - old := defaultService.Swap(svc) - defer func() { - if old != nil { - defaultService.Store(old) - } - }() - - output, err := 
RunWithOptions(context.Background(), RunOptions{ - Command: "echo", - Args: []string{"run", "options"}, - }) - require.NoError(t, err) - assert.Contains(t, output, "run options") -} - -func TestGlobal_Running(t *testing.T) { - svc, _ := newTestService(t) - - // Set as default - old := defaultService.Swap(svc) - defer func() { - if old != nil { - defaultService.Store(old) - } - }() - - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - // Start a long-running process - proc, err := Start(ctx, "sleep", "60") - require.NoError(t, err) - - running := Running() - assert.Len(t, running, 1) - assert.Equal(t, proc.ID, running[0].ID) - - cancel() - <-proc.Done() - - running = Running() - assert.Len(t, running, 0) -} diff --git a/pkg/process/process.go b/pkg/process/process.go deleted file mode 100644 index a70d391..0000000 --- a/pkg/process/process.go +++ /dev/null @@ -1,182 +0,0 @@ -package process - -import ( - "context" - "io" - "os/exec" - "sync" - "time" -) - -// Process represents a managed external process. -type Process struct { - ID string - Command string - Args []string - Dir string - Env []string - StartedAt time.Time - Status Status - ExitCode int - Duration time.Duration - - cmd *exec.Cmd - ctx context.Context - cancel context.CancelFunc - output *RingBuffer - stdin io.WriteCloser - done chan struct{} - mu sync.RWMutex -} - -// Info returns a snapshot of process state. -func (p *Process) Info() Info { - p.mu.RLock() - defer p.mu.RUnlock() - - pid := 0 - if p.cmd != nil && p.cmd.Process != nil { - pid = p.cmd.Process.Pid - } - - return Info{ - ID: p.ID, - Command: p.Command, - Args: p.Args, - Dir: p.Dir, - StartedAt: p.StartedAt, - Status: p.Status, - ExitCode: p.ExitCode, - Duration: p.Duration, - PID: pid, - } -} - -// Output returns the captured output as a string. 
-func (p *Process) Output() string { - p.mu.RLock() - defer p.mu.RUnlock() - if p.output == nil { - return "" - } - return p.output.String() -} - -// OutputBytes returns the captured output as bytes. -func (p *Process) OutputBytes() []byte { - p.mu.RLock() - defer p.mu.RUnlock() - if p.output == nil { - return nil - } - return p.output.Bytes() -} - -// IsRunning returns true if the process is still executing. -func (p *Process) IsRunning() bool { - p.mu.RLock() - defer p.mu.RUnlock() - return p.Status == StatusRunning -} - -// Wait blocks until the process exits. -func (p *Process) Wait() error { - <-p.done - p.mu.RLock() - defer p.mu.RUnlock() - if p.Status == StatusFailed || p.Status == StatusKilled { - return &exec.ExitError{} - } - if p.ExitCode != 0 { - return &exec.ExitError{} - } - return nil -} - -// Done returns a channel that closes when the process exits. -func (p *Process) Done() <-chan struct{} { - return p.done -} - -// Kill forcefully terminates the process. -func (p *Process) Kill() error { - p.mu.Lock() - defer p.mu.Unlock() - - if p.Status != StatusRunning { - return nil - } - - if p.cmd == nil || p.cmd.Process == nil { - return nil - } - - return p.cmd.Process.Kill() -} - -// Signal sends a signal to the process. -func (p *Process) Signal(sig interface{ Signal() }) error { - p.mu.Lock() - defer p.mu.Unlock() - - if p.Status != StatusRunning { - return nil - } - - if p.cmd == nil || p.cmd.Process == nil { - return nil - } - - // Type assert to os.Signal for Process.Signal - if osSig, ok := sig.(interface{ String() string }); ok { - _ = osSig // Satisfy linter - } - - return p.cmd.Process.Kill() // Simplified - would use Signal in full impl -} - -// SendInput writes to the process stdin. 
-func (p *Process) SendInput(input string) error { - p.mu.RLock() - defer p.mu.RUnlock() - - if p.Status != StatusRunning { - return ErrProcessNotRunning - } - - if p.stdin == nil { - return ErrStdinNotAvailable - } - - _, err := p.stdin.Write([]byte(input)) - return err -} - -// CloseStdin closes the process stdin pipe. -func (p *Process) CloseStdin() error { - p.mu.Lock() - defer p.mu.Unlock() - - if p.stdin == nil { - return nil - } - - err := p.stdin.Close() - p.stdin = nil - return err -} - -// setStatus updates the process status (internal use). -func (p *Process) setStatus(status Status) { - p.mu.Lock() - defer p.mu.Unlock() - p.Status = status -} - -// setExitCode sets the exit code and duration (internal use). -func (p *Process) setExitCode(code int, duration time.Duration) { - p.mu.Lock() - defer p.mu.Unlock() - p.ExitCode = code - p.Duration = duration -} diff --git a/pkg/process/process_global.go b/pkg/process/process_global.go deleted file mode 100644 index 9a0ffc8..0000000 --- a/pkg/process/process_global.go +++ /dev/null @@ -1,132 +0,0 @@ -package process - -import ( - "context" - "sync" - "sync/atomic" - - "github.com/host-uk/core/pkg/framework" -) - -// Global default service (follows i18n pattern). -var ( - defaultService atomic.Pointer[Service] - defaultOnce sync.Once - defaultErr error -) - -// Default returns the global process service. -// Returns nil if not initialized. -func Default() *Service { - return defaultService.Load() -} - -// SetDefault sets the global process service. -// Thread-safe: can be called concurrently with Default(). -func SetDefault(s *Service) { - if s == nil { - panic("process: SetDefault called with nil service") - } - defaultService.Store(s) -} - -// Init initializes the default global service with a Core instance. -// This is typically called during application startup. 
-func Init(c *framework.Core) error { - defaultOnce.Do(func() { - factory := NewService(Options{}) - svc, err := factory(c) - if err != nil { - defaultErr = err - return - } - defaultService.Store(svc.(*Service)) - }) - return defaultErr -} - -// --- Global convenience functions --- - -// Start spawns a new process using the default service. -func Start(ctx context.Context, command string, args ...string) (*Process, error) { - svc := Default() - if svc == nil { - return nil, ErrServiceNotInitialized - } - return svc.Start(ctx, command, args...) -} - -// Run executes a command and waits for completion using the default service. -func Run(ctx context.Context, command string, args ...string) (string, error) { - svc := Default() - if svc == nil { - return "", ErrServiceNotInitialized - } - return svc.Run(ctx, command, args...) -} - -// Get returns a process by ID from the default service. -func Get(id string) (*Process, error) { - svc := Default() - if svc == nil { - return nil, ErrServiceNotInitialized - } - return svc.Get(id) -} - -// List returns all processes from the default service. -func List() []*Process { - svc := Default() - if svc == nil { - return nil - } - return svc.List() -} - -// Kill terminates a process by ID using the default service. -func Kill(id string) error { - svc := Default() - if svc == nil { - return ErrServiceNotInitialized - } - return svc.Kill(id) -} - -// StartWithOptions spawns a process with full configuration using the default service. -func StartWithOptions(ctx context.Context, opts RunOptions) (*Process, error) { - svc := Default() - if svc == nil { - return nil, ErrServiceNotInitialized - } - return svc.StartWithOptions(ctx, opts) -} - -// RunWithOptions executes a command with options and waits using the default service. 
-func RunWithOptions(ctx context.Context, opts RunOptions) (string, error) { - svc := Default() - if svc == nil { - return "", ErrServiceNotInitialized - } - return svc.RunWithOptions(ctx, opts) -} - -// Running returns all currently running processes from the default service. -func Running() []*Process { - svc := Default() - if svc == nil { - return nil - } - return svc.Running() -} - -// ErrServiceNotInitialized is returned when the service is not initialized. -var ErrServiceNotInitialized = &ServiceError{msg: "process: service not initialized"} - -// ServiceError represents a service-level error. -type ServiceError struct { - msg string -} - -func (e *ServiceError) Error() string { - return e.msg -} diff --git a/pkg/process/process_test.go b/pkg/process/process_test.go deleted file mode 100644 index 8bf7bf7..0000000 --- a/pkg/process/process_test.go +++ /dev/null @@ -1,227 +0,0 @@ -package process - -import ( - "context" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestProcess_Info(t *testing.T) { - svc, _ := newTestService(t) - - proc, err := svc.Start(context.Background(), "echo", "hello") - require.NoError(t, err) - - <-proc.Done() - - info := proc.Info() - assert.Equal(t, proc.ID, info.ID) - assert.Equal(t, "echo", info.Command) - assert.Equal(t, []string{"hello"}, info.Args) - assert.Equal(t, StatusExited, info.Status) - assert.Equal(t, 0, info.ExitCode) - assert.Greater(t, info.Duration, time.Duration(0)) -} - -func TestProcess_Output(t *testing.T) { - t.Run("captures stdout", func(t *testing.T) { - svc, _ := newTestService(t) - - proc, err := svc.Start(context.Background(), "echo", "hello world") - require.NoError(t, err) - - <-proc.Done() - - output := proc.Output() - assert.Contains(t, output, "hello world") - }) - - t.Run("OutputBytes returns copy", func(t *testing.T) { - svc, _ := newTestService(t) - - proc, err := svc.Start(context.Background(), "echo", "test") - require.NoError(t, 
err) - - <-proc.Done() - - bytes := proc.OutputBytes() - assert.NotNil(t, bytes) - assert.Contains(t, string(bytes), "test") - }) -} - -func TestProcess_IsRunning(t *testing.T) { - t.Run("true while running", func(t *testing.T) { - svc, _ := newTestService(t) - - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - proc, err := svc.Start(ctx, "sleep", "10") - require.NoError(t, err) - - assert.True(t, proc.IsRunning()) - - cancel() - <-proc.Done() - - assert.False(t, proc.IsRunning()) - }) - - t.Run("false after completion", func(t *testing.T) { - svc, _ := newTestService(t) - - proc, err := svc.Start(context.Background(), "echo", "done") - require.NoError(t, err) - - <-proc.Done() - - assert.False(t, proc.IsRunning()) - }) -} - -func TestProcess_Wait(t *testing.T) { - t.Run("returns nil on success", func(t *testing.T) { - svc, _ := newTestService(t) - - proc, err := svc.Start(context.Background(), "echo", "ok") - require.NoError(t, err) - - err = proc.Wait() - assert.NoError(t, err) - }) - - t.Run("returns error on failure", func(t *testing.T) { - svc, _ := newTestService(t) - - proc, err := svc.Start(context.Background(), "sh", "-c", "exit 1") - require.NoError(t, err) - - err = proc.Wait() - assert.Error(t, err) - }) -} - -func TestProcess_Done(t *testing.T) { - t.Run("channel closes on completion", func(t *testing.T) { - svc, _ := newTestService(t) - - proc, err := svc.Start(context.Background(), "echo", "test") - require.NoError(t, err) - - select { - case <-proc.Done(): - // Success - channel closed - case <-time.After(5 * time.Second): - t.Fatal("Done channel should have closed") - } - }) -} - -func TestProcess_Kill(t *testing.T) { - t.Run("terminates running process", func(t *testing.T) { - svc, _ := newTestService(t) - - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - proc, err := svc.Start(ctx, "sleep", "60") - require.NoError(t, err) - - assert.True(t, proc.IsRunning()) - - err = proc.Kill() - 
assert.NoError(t, err) - - select { - case <-proc.Done(): - // Good - process terminated - case <-time.After(2 * time.Second): - t.Fatal("process should have been killed") - } - }) - - t.Run("noop on completed process", func(t *testing.T) { - svc, _ := newTestService(t) - - proc, err := svc.Start(context.Background(), "echo", "done") - require.NoError(t, err) - - <-proc.Done() - - err = proc.Kill() - assert.NoError(t, err) - }) -} - -func TestProcess_SendInput(t *testing.T) { - t.Run("writes to stdin", func(t *testing.T) { - svc, _ := newTestService(t) - - // Use cat to echo back stdin - proc, err := svc.Start(context.Background(), "cat") - require.NoError(t, err) - - err = proc.SendInput("hello\n") - assert.NoError(t, err) - - err = proc.CloseStdin() - assert.NoError(t, err) - - <-proc.Done() - - assert.Contains(t, proc.Output(), "hello") - }) - - t.Run("error on completed process", func(t *testing.T) { - svc, _ := newTestService(t) - - proc, err := svc.Start(context.Background(), "echo", "done") - require.NoError(t, err) - - <-proc.Done() - - err = proc.SendInput("test") - assert.ErrorIs(t, err, ErrProcessNotRunning) - }) -} - -func TestProcess_CloseStdin(t *testing.T) { - t.Run("closes stdin pipe", func(t *testing.T) { - svc, _ := newTestService(t) - - proc, err := svc.Start(context.Background(), "cat") - require.NoError(t, err) - - err = proc.CloseStdin() - assert.NoError(t, err) - - // Process should exit now that stdin is closed - select { - case <-proc.Done(): - // Good - case <-time.After(2 * time.Second): - t.Fatal("cat should exit when stdin is closed") - } - }) - - t.Run("double close is safe", func(t *testing.T) { - svc, _ := newTestService(t) - - proc, err := svc.Start(context.Background(), "cat") - require.NoError(t, err) - - // First close - err = proc.CloseStdin() - assert.NoError(t, err) - - <-proc.Done() - - // Second close should be safe (stdin already nil) - err = proc.CloseStdin() - assert.NoError(t, err) - }) -} diff --git 
a/pkg/process/runner.go b/pkg/process/runner.go deleted file mode 100644 index effd39a..0000000 --- a/pkg/process/runner.go +++ /dev/null @@ -1,293 +0,0 @@ -package process - -import ( - "context" - "fmt" - "sync" - "time" -) - -// Runner orchestrates multiple processes with dependencies. -type Runner struct { - service *Service -} - -// NewRunner creates a runner for the given service. -func NewRunner(svc *Service) *Runner { - return &Runner{service: svc} -} - -// RunSpec defines a process to run with optional dependencies. -type RunSpec struct { - // Name is a friendly identifier (e.g., "lint", "test"). - Name string - // Command is the executable to run. - Command string - // Args are the command arguments. - Args []string - // Dir is the working directory. - Dir string - // Env are additional environment variables. - Env []string - // After lists spec names that must complete successfully first. - After []string - // AllowFailure if true, continues pipeline even if this spec fails. - AllowFailure bool -} - -// RunResult captures the outcome of a single process. -type RunResult struct { - Name string - Spec RunSpec - ExitCode int - Duration time.Duration - Output string - Error error - Skipped bool -} - -// Passed returns true if the process succeeded. -func (r RunResult) Passed() bool { - return !r.Skipped && r.Error == nil && r.ExitCode == 0 -} - -// RunAllResult is the aggregate result of running multiple specs. -type RunAllResult struct { - Results []RunResult - Duration time.Duration - Passed int - Failed int - Skipped int -} - -// Success returns true if all non-skipped specs passed. -func (r RunAllResult) Success() bool { - return r.Failed == 0 -} - -// RunAll executes specs respecting dependencies, parallelising where possible. 
-func (r *Runner) RunAll(ctx context.Context, specs []RunSpec) (*RunAllResult, error) { - start := time.Now() - - // Build dependency graph - specMap := make(map[string]RunSpec) - for _, spec := range specs { - specMap[spec.Name] = spec - } - - // Track completion - completed := make(map[string]*RunResult) - var completedMu sync.Mutex - - results := make([]RunResult, 0, len(specs)) - var resultsMu sync.Mutex - - // Process specs in waves - remaining := make(map[string]RunSpec) - for _, spec := range specs { - remaining[spec.Name] = spec - } - - for len(remaining) > 0 { - // Find specs ready to run (all dependencies satisfied) - ready := make([]RunSpec, 0) - for _, spec := range remaining { - if r.canRun(spec, completed) { - ready = append(ready, spec) - } - } - - if len(ready) == 0 && len(remaining) > 0 { - // Deadlock - circular dependency or missing specs - for name := range remaining { - results = append(results, RunResult{ - Name: name, - Spec: remaining[name], - Skipped: true, - Error: fmt.Errorf("circular dependency or missing dependency"), - }) - } - break - } - - // Run ready specs in parallel - var wg sync.WaitGroup - for _, spec := range ready { - wg.Add(1) - go func(spec RunSpec) { - defer wg.Done() - - // Check if dependencies failed - completedMu.Lock() - shouldSkip := false - for _, dep := range spec.After { - if result, ok := completed[dep]; ok { - if !result.Passed() && !specMap[dep].AllowFailure { - shouldSkip = true - break - } - } - } - completedMu.Unlock() - - var result RunResult - if shouldSkip { - result = RunResult{ - Name: spec.Name, - Spec: spec, - Skipped: true, - Error: fmt.Errorf("skipped due to dependency failure"), - } - } else { - result = r.runSpec(ctx, spec) - } - - completedMu.Lock() - completed[spec.Name] = &result - completedMu.Unlock() - - resultsMu.Lock() - results = append(results, result) - resultsMu.Unlock() - }(spec) - } - wg.Wait() - - // Remove completed from remaining - for _, spec := range ready { - delete(remaining, 
spec.Name) - } - } - - // Build aggregate result - aggResult := &RunAllResult{ - Results: results, - Duration: time.Since(start), - } - - for _, res := range results { - if res.Skipped { - aggResult.Skipped++ - } else if res.Passed() { - aggResult.Passed++ - } else { - aggResult.Failed++ - } - } - - return aggResult, nil -} - -// canRun checks if all dependencies are completed. -func (r *Runner) canRun(spec RunSpec, completed map[string]*RunResult) bool { - for _, dep := range spec.After { - if _, ok := completed[dep]; !ok { - return false - } - } - return true -} - -// runSpec executes a single spec. -func (r *Runner) runSpec(ctx context.Context, spec RunSpec) RunResult { - start := time.Now() - - proc, err := r.service.StartWithOptions(ctx, RunOptions{ - Command: spec.Command, - Args: spec.Args, - Dir: spec.Dir, - Env: spec.Env, - }) - if err != nil { - return RunResult{ - Name: spec.Name, - Spec: spec, - Duration: time.Since(start), - Error: err, - } - } - - <-proc.Done() - - return RunResult{ - Name: spec.Name, - Spec: spec, - ExitCode: proc.ExitCode, - Duration: proc.Duration, - Output: proc.Output(), - Error: nil, - } -} - -// RunSequential executes specs one after another, stopping on first failure. 
-func (r *Runner) RunSequential(ctx context.Context, specs []RunSpec) (*RunAllResult, error) { - start := time.Now() - results := make([]RunResult, 0, len(specs)) - - for _, spec := range specs { - result := r.runSpec(ctx, spec) - results = append(results, result) - - if !result.Passed() && !spec.AllowFailure { - // Mark remaining as skipped - for i := len(results); i < len(specs); i++ { - results = append(results, RunResult{ - Name: specs[i].Name, - Spec: specs[i], - Skipped: true, - }) - } - break - } - } - - aggResult := &RunAllResult{ - Results: results, - Duration: time.Since(start), - } - - for _, res := range results { - if res.Skipped { - aggResult.Skipped++ - } else if res.Passed() { - aggResult.Passed++ - } else { - aggResult.Failed++ - } - } - - return aggResult, nil -} - -// RunParallel executes all specs concurrently, regardless of dependencies. -func (r *Runner) RunParallel(ctx context.Context, specs []RunSpec) (*RunAllResult, error) { - start := time.Now() - results := make([]RunResult, len(specs)) - - var wg sync.WaitGroup - for i, spec := range specs { - wg.Add(1) - go func(i int, spec RunSpec) { - defer wg.Done() - results[i] = r.runSpec(ctx, spec) - }(i, spec) - } - wg.Wait() - - aggResult := &RunAllResult{ - Results: results, - Duration: time.Since(start), - } - - for _, res := range results { - if res.Skipped { - aggResult.Skipped++ - } else if res.Passed() { - aggResult.Passed++ - } else { - aggResult.Failed++ - } - } - - return aggResult, nil -} diff --git a/pkg/process/runner_test.go b/pkg/process/runner_test.go deleted file mode 100644 index 85d1a3e..0000000 --- a/pkg/process/runner_test.go +++ /dev/null @@ -1,176 +0,0 @@ -package process - -import ( - "context" - "testing" - - "github.com/host-uk/core/pkg/framework" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func newTestRunner(t *testing.T) *Runner { - t.Helper() - - core, err := framework.New( - framework.WithName("process", 
NewService(Options{})), - ) - require.NoError(t, err) - - svc, err := framework.ServiceFor[*Service](core, "process") - require.NoError(t, err) - - return NewRunner(svc) -} - -func TestRunner_RunSequential(t *testing.T) { - t.Run("all pass", func(t *testing.T) { - runner := newTestRunner(t) - - result, err := runner.RunSequential(context.Background(), []RunSpec{ - {Name: "first", Command: "echo", Args: []string{"1"}}, - {Name: "second", Command: "echo", Args: []string{"2"}}, - {Name: "third", Command: "echo", Args: []string{"3"}}, - }) - require.NoError(t, err) - - assert.True(t, result.Success()) - assert.Equal(t, 3, result.Passed) - assert.Equal(t, 0, result.Failed) - assert.Equal(t, 0, result.Skipped) - }) - - t.Run("stops on failure", func(t *testing.T) { - runner := newTestRunner(t) - - result, err := runner.RunSequential(context.Background(), []RunSpec{ - {Name: "first", Command: "echo", Args: []string{"1"}}, - {Name: "fails", Command: "sh", Args: []string{"-c", "exit 1"}}, - {Name: "third", Command: "echo", Args: []string{"3"}}, - }) - require.NoError(t, err) - - assert.False(t, result.Success()) - assert.Equal(t, 1, result.Passed) - assert.Equal(t, 1, result.Failed) - assert.Equal(t, 1, result.Skipped) - }) - - t.Run("allow failure continues", func(t *testing.T) { - runner := newTestRunner(t) - - result, err := runner.RunSequential(context.Background(), []RunSpec{ - {Name: "first", Command: "echo", Args: []string{"1"}}, - {Name: "fails", Command: "sh", Args: []string{"-c", "exit 1"}, AllowFailure: true}, - {Name: "third", Command: "echo", Args: []string{"3"}}, - }) - require.NoError(t, err) - - // Still counts as failed but pipeline continues - assert.Equal(t, 2, result.Passed) - assert.Equal(t, 1, result.Failed) - assert.Equal(t, 0, result.Skipped) - }) -} - -func TestRunner_RunParallel(t *testing.T) { - t.Run("all run concurrently", func(t *testing.T) { - runner := newTestRunner(t) - - result, err := runner.RunParallel(context.Background(), []RunSpec{ - 
{Name: "first", Command: "echo", Args: []string{"1"}}, - {Name: "second", Command: "echo", Args: []string{"2"}}, - {Name: "third", Command: "echo", Args: []string{"3"}}, - }) - require.NoError(t, err) - - assert.True(t, result.Success()) - assert.Equal(t, 3, result.Passed) - assert.Len(t, result.Results, 3) - }) - - t.Run("failure doesnt stop others", func(t *testing.T) { - runner := newTestRunner(t) - - result, err := runner.RunParallel(context.Background(), []RunSpec{ - {Name: "first", Command: "echo", Args: []string{"1"}}, - {Name: "fails", Command: "sh", Args: []string{"-c", "exit 1"}}, - {Name: "third", Command: "echo", Args: []string{"3"}}, - }) - require.NoError(t, err) - - assert.False(t, result.Success()) - assert.Equal(t, 2, result.Passed) - assert.Equal(t, 1, result.Failed) - }) -} - -func TestRunner_RunAll(t *testing.T) { - t.Run("respects dependencies", func(t *testing.T) { - runner := newTestRunner(t) - - result, err := runner.RunAll(context.Background(), []RunSpec{ - {Name: "third", Command: "echo", Args: []string{"3"}, After: []string{"second"}}, - {Name: "first", Command: "echo", Args: []string{"1"}}, - {Name: "second", Command: "echo", Args: []string{"2"}, After: []string{"first"}}, - }) - require.NoError(t, err) - - assert.True(t, result.Success()) - assert.Equal(t, 3, result.Passed) - }) - - t.Run("skips dependents on failure", func(t *testing.T) { - runner := newTestRunner(t) - - result, err := runner.RunAll(context.Background(), []RunSpec{ - {Name: "first", Command: "sh", Args: []string{"-c", "exit 1"}}, - {Name: "second", Command: "echo", Args: []string{"2"}, After: []string{"first"}}, - {Name: "third", Command: "echo", Args: []string{"3"}, After: []string{"second"}}, - }) - require.NoError(t, err) - - assert.False(t, result.Success()) - assert.Equal(t, 0, result.Passed) - assert.Equal(t, 1, result.Failed) - assert.Equal(t, 2, result.Skipped) - }) - - t.Run("parallel independent specs", func(t *testing.T) { - runner := newTestRunner(t) - - // 
These should run in parallel since they have no dependencies - result, err := runner.RunAll(context.Background(), []RunSpec{ - {Name: "a", Command: "echo", Args: []string{"a"}}, - {Name: "b", Command: "echo", Args: []string{"b"}}, - {Name: "c", Command: "echo", Args: []string{"c"}}, - {Name: "final", Command: "echo", Args: []string{"done"}, After: []string{"a", "b", "c"}}, - }) - require.NoError(t, err) - - assert.True(t, result.Success()) - assert.Equal(t, 4, result.Passed) - }) -} - -func TestRunResult_Passed(t *testing.T) { - t.Run("success", func(t *testing.T) { - r := RunResult{ExitCode: 0} - assert.True(t, r.Passed()) - }) - - t.Run("non-zero exit", func(t *testing.T) { - r := RunResult{ExitCode: 1} - assert.False(t, r.Passed()) - }) - - t.Run("skipped", func(t *testing.T) { - r := RunResult{ExitCode: 0, Skipped: true} - assert.False(t, r.Passed()) - }) - - t.Run("error", func(t *testing.T) { - r := RunResult{ExitCode: 0, Error: assert.AnError} - assert.False(t, r.Passed()) - }) -} diff --git a/pkg/process/service.go b/pkg/process/service.go deleted file mode 100644 index ab5683b..0000000 --- a/pkg/process/service.go +++ /dev/null @@ -1,378 +0,0 @@ -package process - -import ( - "bufio" - "context" - "errors" - "fmt" - "io" - "os/exec" - "sync" - "sync/atomic" - "time" - - "github.com/host-uk/core/pkg/framework" -) - -// Default buffer size for process output (1MB). -const DefaultBufferSize = 1024 * 1024 - -// Errors -var ( - ErrProcessNotFound = errors.New("process not found") - ErrProcessNotRunning = errors.New("process is not running") - ErrStdinNotAvailable = errors.New("stdin not available") -) - -// Service manages process execution with Core IPC integration. -type Service struct { - *framework.ServiceRuntime[Options] - - processes map[string]*Process - mu sync.RWMutex - bufSize int - idCounter atomic.Uint64 -} - -// Options configures the process service. -type Options struct { - // BufferSize is the ring buffer size for output capture. 
- // Default: 1MB (1024 * 1024 bytes). - BufferSize int -} - -// NewService creates a process service factory for Core registration. -// -// core, _ := framework.New( -// framework.WithName("process", process.NewService(process.Options{})), -// ) -func NewService(opts Options) func(*framework.Core) (any, error) { - return func(c *framework.Core) (any, error) { - if opts.BufferSize == 0 { - opts.BufferSize = DefaultBufferSize - } - svc := &Service{ - ServiceRuntime: framework.NewServiceRuntime(c, opts), - processes: make(map[string]*Process), - bufSize: opts.BufferSize, - } - return svc, nil - } -} - -// OnStartup implements framework.Startable. -func (s *Service) OnStartup(ctx context.Context) error { - return nil -} - -// OnShutdown implements framework.Stoppable. -// Kills all running processes on shutdown. -func (s *Service) OnShutdown(ctx context.Context) error { - s.mu.RLock() - procs := make([]*Process, 0, len(s.processes)) - for _, p := range s.processes { - if p.IsRunning() { - procs = append(procs, p) - } - } - s.mu.RUnlock() - - for _, p := range procs { - _ = p.Kill() - } - - return nil -} - -// Start spawns a new process with the given command and args. -func (s *Service) Start(ctx context.Context, command string, args ...string) (*Process, error) { - return s.StartWithOptions(ctx, RunOptions{ - Command: command, - Args: args, - }) -} - -// StartWithOptions spawns a process with full configuration. -func (s *Service) StartWithOptions(ctx context.Context, opts RunOptions) (*Process, error) { - id := fmt.Sprintf("proc-%d", s.idCounter.Add(1)) - - procCtx, cancel := context.WithCancel(ctx) - cmd := exec.CommandContext(procCtx, opts.Command, opts.Args...) - - if opts.Dir != "" { - cmd.Dir = opts.Dir - } - if len(opts.Env) > 0 { - cmd.Env = append(cmd.Environ(), opts.Env...) 
- } - - // Set up pipes - stdout, err := cmd.StdoutPipe() - if err != nil { - cancel() - return nil, fmt.Errorf("failed to create stdout pipe: %w", err) - } - - stderr, err := cmd.StderrPipe() - if err != nil { - cancel() - return nil, fmt.Errorf("failed to create stderr pipe: %w", err) - } - - stdin, err := cmd.StdinPipe() - if err != nil { - cancel() - return nil, fmt.Errorf("failed to create stdin pipe: %w", err) - } - - // Create output buffer (enabled by default) - var output *RingBuffer - if !opts.DisableCapture { - output = NewRingBuffer(s.bufSize) - } - - proc := &Process{ - ID: id, - Command: opts.Command, - Args: opts.Args, - Dir: opts.Dir, - Env: opts.Env, - StartedAt: time.Now(), - Status: StatusRunning, - cmd: cmd, - ctx: procCtx, - cancel: cancel, - output: output, - stdin: stdin, - done: make(chan struct{}), - } - - // Start the process - if err := cmd.Start(); err != nil { - cancel() - return nil, fmt.Errorf("failed to start process: %w", err) - } - - // Store process - s.mu.Lock() - s.processes[id] = proc - s.mu.Unlock() - - // Broadcast start - s.Core().ACTION(ActionProcessStarted{ - ID: id, - Command: opts.Command, - Args: opts.Args, - Dir: opts.Dir, - PID: cmd.Process.Pid, - }) - - // Stream output in goroutines - var wg sync.WaitGroup - wg.Add(2) - go func() { - defer wg.Done() - s.streamOutput(proc, stdout, StreamStdout) - }() - go func() { - defer wg.Done() - s.streamOutput(proc, stderr, StreamStderr) - }() - - // Wait for process completion - go func() { - // Wait for output streaming to complete - wg.Wait() - - // Wait for process exit - err := cmd.Wait() - - duration := time.Since(proc.StartedAt) - - proc.mu.Lock() - proc.Duration = duration - if err != nil { - var exitErr *exec.ExitError - if errors.As(err, &exitErr) { - proc.ExitCode = exitErr.ExitCode() - proc.Status = StatusExited - } else { - proc.Status = StatusFailed - } - } else { - proc.ExitCode = 0 - proc.Status = StatusExited - } - status := proc.Status - exitCode := 
proc.ExitCode - proc.mu.Unlock() - - close(proc.done) - - // Broadcast exit - var exitErr error - if status == StatusFailed { - exitErr = err - } - s.Core().ACTION(ActionProcessExited{ - ID: id, - ExitCode: exitCode, - Duration: duration, - Error: exitErr, - }) - }() - - return proc, nil -} - -// streamOutput reads from a pipe and broadcasts lines via ACTION. -func (s *Service) streamOutput(proc *Process, r io.Reader, stream Stream) { - scanner := bufio.NewScanner(r) - // Increase buffer for long lines - scanner.Buffer(make([]byte, 64*1024), 1024*1024) - - for scanner.Scan() { - line := scanner.Text() - - // Write to ring buffer - if proc.output != nil { - proc.output.Write([]byte(line + "\n")) - } - - // Broadcast output - s.Core().ACTION(ActionProcessOutput{ - ID: proc.ID, - Line: line, - Stream: stream, - }) - } -} - -// Get returns a process by ID. -func (s *Service) Get(id string) (*Process, error) { - s.mu.RLock() - defer s.mu.RUnlock() - - proc, ok := s.processes[id] - if !ok { - return nil, ErrProcessNotFound - } - return proc, nil -} - -// List returns all processes. -func (s *Service) List() []*Process { - s.mu.RLock() - defer s.mu.RUnlock() - - result := make([]*Process, 0, len(s.processes)) - for _, p := range s.processes { - result = append(result, p) - } - return result -} - -// Running returns all currently running processes. -func (s *Service) Running() []*Process { - s.mu.RLock() - defer s.mu.RUnlock() - - var result []*Process - for _, p := range s.processes { - if p.IsRunning() { - result = append(result, p) - } - } - return result -} - -// Kill terminates a process by ID. -func (s *Service) Kill(id string) error { - proc, err := s.Get(id) - if err != nil { - return err - } - - if err := proc.Kill(); err != nil { - return err - } - - s.Core().ACTION(ActionProcessKilled{ - ID: id, - Signal: "SIGKILL", - }) - - return nil -} - -// Remove removes a completed process from the list. 
-func (s *Service) Remove(id string) error { - s.mu.Lock() - defer s.mu.Unlock() - - proc, ok := s.processes[id] - if !ok { - return ErrProcessNotFound - } - - if proc.IsRunning() { - return errors.New("cannot remove running process") - } - - delete(s.processes, id) - return nil -} - -// Clear removes all completed processes. -func (s *Service) Clear() { - s.mu.Lock() - defer s.mu.Unlock() - - for id, p := range s.processes { - if !p.IsRunning() { - delete(s.processes, id) - } - } -} - -// Output returns the captured output of a process. -func (s *Service) Output(id string) (string, error) { - proc, err := s.Get(id) - if err != nil { - return "", err - } - return proc.Output(), nil -} - -// Run executes a command and waits for completion. -// Returns the combined output and any error. -func (s *Service) Run(ctx context.Context, command string, args ...string) (string, error) { - proc, err := s.Start(ctx, command, args...) - if err != nil { - return "", err - } - - <-proc.Done() - - output := proc.Output() - if proc.ExitCode != 0 { - return output, fmt.Errorf("process exited with code %d", proc.ExitCode) - } - return output, nil -} - -// RunWithOptions executes a command with options and waits for completion. 
-func (s *Service) RunWithOptions(ctx context.Context, opts RunOptions) (string, error) { - proc, err := s.StartWithOptions(ctx, opts) - if err != nil { - return "", err - } - - <-proc.Done() - - output := proc.Output() - if proc.ExitCode != 0 { - return output, fmt.Errorf("process exited with code %d", proc.ExitCode) - } - return output, nil -} diff --git a/pkg/process/service_test.go b/pkg/process/service_test.go deleted file mode 100644 index dba9d82..0000000 --- a/pkg/process/service_test.go +++ /dev/null @@ -1,258 +0,0 @@ -package process - -import ( - "context" - "strings" - "sync" - "testing" - "time" - - "github.com/host-uk/core/pkg/framework" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func newTestService(t *testing.T) (*Service, *framework.Core) { - t.Helper() - - core, err := framework.New( - framework.WithName("process", NewService(Options{BufferSize: 1024})), - ) - require.NoError(t, err) - - svc, err := framework.ServiceFor[*Service](core, "process") - require.NoError(t, err) - - return svc, core -} - -func TestService_Start(t *testing.T) { - t.Run("echo command", func(t *testing.T) { - svc, _ := newTestService(t) - - proc, err := svc.Start(context.Background(), "echo", "hello") - require.NoError(t, err) - require.NotNil(t, proc) - - assert.NotEmpty(t, proc.ID) - assert.Equal(t, "echo", proc.Command) - assert.Equal(t, []string{"hello"}, proc.Args) - - // Wait for completion - <-proc.Done() - - assert.Equal(t, StatusExited, proc.Status) - assert.Equal(t, 0, proc.ExitCode) - assert.Contains(t, proc.Output(), "hello") - }) - - t.Run("failing command", func(t *testing.T) { - svc, _ := newTestService(t) - - proc, err := svc.Start(context.Background(), "sh", "-c", "exit 42") - require.NoError(t, err) - - <-proc.Done() - - assert.Equal(t, StatusExited, proc.Status) - assert.Equal(t, 42, proc.ExitCode) - }) - - t.Run("non-existent command", func(t *testing.T) { - svc, _ := newTestService(t) - - _, err := 
svc.Start(context.Background(), "nonexistent_command_xyz") - assert.Error(t, err) - }) - - t.Run("with working directory", func(t *testing.T) { - svc, _ := newTestService(t) - - proc, err := svc.StartWithOptions(context.Background(), RunOptions{ - Command: "pwd", - Dir: "/tmp", - }) - require.NoError(t, err) - - <-proc.Done() - - // On macOS /tmp is a symlink to /private/tmp - output := strings.TrimSpace(proc.Output()) - assert.True(t, output == "/tmp" || output == "/private/tmp", "got: %s", output) - }) - - t.Run("context cancellation", func(t *testing.T) { - svc, _ := newTestService(t) - - ctx, cancel := context.WithCancel(context.Background()) - proc, err := svc.Start(ctx, "sleep", "10") - require.NoError(t, err) - - // Cancel immediately - cancel() - - select { - case <-proc.Done(): - // Good - process was killed - case <-time.After(2 * time.Second): - t.Fatal("process should have been killed") - } - }) -} - -func TestService_Run(t *testing.T) { - t.Run("returns output", func(t *testing.T) { - svc, _ := newTestService(t) - - output, err := svc.Run(context.Background(), "echo", "hello world") - require.NoError(t, err) - assert.Contains(t, output, "hello world") - }) - - t.Run("returns error on failure", func(t *testing.T) { - svc, _ := newTestService(t) - - _, err := svc.Run(context.Background(), "sh", "-c", "exit 1") - assert.Error(t, err) - assert.Contains(t, err.Error(), "exited with code 1") - }) -} - -func TestService_Actions(t *testing.T) { - t.Run("broadcasts events", func(t *testing.T) { - core, err := framework.New( - framework.WithName("process", NewService(Options{})), - ) - require.NoError(t, err) - - var started []ActionProcessStarted - var outputs []ActionProcessOutput - var exited []ActionProcessExited - var mu sync.Mutex - - core.RegisterAction(func(c *framework.Core, msg framework.Message) error { - mu.Lock() - defer mu.Unlock() - switch m := msg.(type) { - case ActionProcessStarted: - started = append(started, m) - case ActionProcessOutput: - 
outputs = append(outputs, m) - case ActionProcessExited: - exited = append(exited, m) - } - return nil - }) - - svc, _ := framework.ServiceFor[*Service](core, "process") - proc, err := svc.Start(context.Background(), "echo", "test") - require.NoError(t, err) - - <-proc.Done() - - // Give time for events to propagate - time.Sleep(10 * time.Millisecond) - - mu.Lock() - defer mu.Unlock() - - assert.Len(t, started, 1) - assert.Equal(t, "echo", started[0].Command) - assert.Equal(t, []string{"test"}, started[0].Args) - - assert.NotEmpty(t, outputs) - foundTest := false - for _, o := range outputs { - if strings.Contains(o.Line, "test") { - foundTest = true - break - } - } - assert.True(t, foundTest, "should have output containing 'test'") - - assert.Len(t, exited, 1) - assert.Equal(t, 0, exited[0].ExitCode) - }) -} - -func TestService_List(t *testing.T) { - t.Run("tracks processes", func(t *testing.T) { - svc, _ := newTestService(t) - - proc1, _ := svc.Start(context.Background(), "echo", "1") - proc2, _ := svc.Start(context.Background(), "echo", "2") - - <-proc1.Done() - <-proc2.Done() - - list := svc.List() - assert.Len(t, list, 2) - }) - - t.Run("get by id", func(t *testing.T) { - svc, _ := newTestService(t) - - proc, _ := svc.Start(context.Background(), "echo", "test") - <-proc.Done() - - got, err := svc.Get(proc.ID) - require.NoError(t, err) - assert.Equal(t, proc.ID, got.ID) - }) - - t.Run("get not found", func(t *testing.T) { - svc, _ := newTestService(t) - - _, err := svc.Get("nonexistent") - assert.ErrorIs(t, err, ErrProcessNotFound) - }) -} - -func TestService_Remove(t *testing.T) { - t.Run("removes completed process", func(t *testing.T) { - svc, _ := newTestService(t) - - proc, _ := svc.Start(context.Background(), "echo", "test") - <-proc.Done() - - err := svc.Remove(proc.ID) - require.NoError(t, err) - - _, err = svc.Get(proc.ID) - assert.ErrorIs(t, err, ErrProcessNotFound) - }) - - t.Run("cannot remove running process", func(t *testing.T) { - svc, _ := 
newTestService(t) - - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - proc, _ := svc.Start(ctx, "sleep", "10") - - err := svc.Remove(proc.ID) - assert.Error(t, err) - - cancel() - <-proc.Done() - }) -} - -func TestService_Clear(t *testing.T) { - t.Run("clears completed processes", func(t *testing.T) { - svc, _ := newTestService(t) - - proc1, _ := svc.Start(context.Background(), "echo", "1") - proc2, _ := svc.Start(context.Background(), "echo", "2") - - <-proc1.Done() - <-proc2.Done() - - assert.Len(t, svc.List(), 2) - - svc.Clear() - - assert.Len(t, svc.List(), 0) - }) -} - diff --git a/pkg/process/types.go b/pkg/process/types.go deleted file mode 100644 index 74e03a6..0000000 --- a/pkg/process/types.go +++ /dev/null @@ -1,86 +0,0 @@ -// Package process provides process management with Core IPC integration. -// -// The process package enables spawning, monitoring, and controlling external -// processes with output streaming via the Core ACTION system. -// -// # Getting Started -// -// // Register with Core -// core, _ := framework.New( -// framework.WithName("process", process.NewService(process.Options{})), -// ) -// -// // Get service and run a process -// svc := framework.MustServiceFor[*process.Service](core, "process") -// proc, _ := svc.Start(ctx, "go", "test", "./...") -// -// # Listening for Events -// -// Process events are broadcast via Core.ACTION: -// -// core.RegisterAction(func(c *framework.Core, msg framework.Message) error { -// switch m := msg.(type) { -// case process.ActionProcessOutput: -// fmt.Print(m.Line) -// case process.ActionProcessExited: -// fmt.Printf("Exit code: %d\n", m.ExitCode) -// } -// return nil -// }) -package process - -import "time" - -// Status represents the process lifecycle state. -type Status string - -const ( - // StatusPending indicates the process is queued but not yet started. - StatusPending Status = "pending" - // StatusRunning indicates the process is actively executing. 
- StatusRunning Status = "running" - // StatusExited indicates the process completed (check ExitCode). - StatusExited Status = "exited" - // StatusFailed indicates the process could not be started. - StatusFailed Status = "failed" - // StatusKilled indicates the process was terminated by signal. - StatusKilled Status = "killed" -) - -// Stream identifies the output source. -type Stream string - -const ( - // StreamStdout is standard output. - StreamStdout Stream = "stdout" - // StreamStderr is standard error. - StreamStderr Stream = "stderr" -) - -// RunOptions configures process execution. -type RunOptions struct { - // Command is the executable to run. - Command string - // Args are the command arguments. - Args []string - // Dir is the working directory (empty = current). - Dir string - // Env are additional environment variables (KEY=VALUE format). - Env []string - // DisableCapture disables output buffering. - // By default, output is captured to a ring buffer. - DisableCapture bool -} - -// Info provides a snapshot of process state without internal fields. -type Info struct { - ID string `json:"id"` - Command string `json:"command"` - Args []string `json:"args"` - Dir string `json:"dir"` - StartedAt time.Time `json:"startedAt"` - Status Status `json:"status"` - ExitCode int `json:"exitCode"` - Duration time.Duration `json:"duration"` - PID int `json:"pid"` -} diff --git a/pkg/qa/cmd_health.go b/pkg/qa/cmd_health.go deleted file mode 100644 index 1a3d4b1..0000000 --- a/pkg/qa/cmd_health.go +++ /dev/null @@ -1,288 +0,0 @@ -// cmd_health.go implements the 'qa health' command for aggregate CI health. 
-// -// Usage: -// core qa health # Show CI health summary -// core qa health --problems # Show only repos with problems - -package qa - -import ( - "encoding/json" - "os/exec" - "sort" - "strings" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/errors" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/repos" -) - -// Health command flags -var ( - healthProblems bool - healthRegistry string -) - -// HealthWorkflowRun represents a GitHub Actions workflow run -type HealthWorkflowRun struct { - Status string `json:"status"` - Conclusion string `json:"conclusion"` - Name string `json:"name"` - HeadSha string `json:"headSha"` - UpdatedAt string `json:"updatedAt"` - URL string `json:"url"` -} - -// RepoHealth represents the CI health of a single repo -type RepoHealth struct { - Name string - Status string // "passing", "failing", "pending", "no_ci", "disabled" - Message string - URL string - FailingSince string -} - -// addHealthCommand adds the 'health' subcommand to qa. 
-func addHealthCommand(parent *cli.Command) { - healthCmd := &cli.Command{ - Use: "health", - Short: i18n.T("cmd.qa.health.short"), - Long: i18n.T("cmd.qa.health.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runHealth() - }, - } - - healthCmd.Flags().BoolVarP(&healthProblems, "problems", "p", false, i18n.T("cmd.qa.health.flag.problems")) - healthCmd.Flags().StringVar(&healthRegistry, "registry", "", i18n.T("common.flag.registry")) - - parent.AddCommand(healthCmd) -} - -func runHealth() error { - // Check gh is available - if _, err := exec.LookPath("gh"); err != nil { - return errors.E("qa.health", i18n.T("error.gh_not_found"), nil) - } - - // Load registry - var reg *repos.Registry - var err error - - if healthRegistry != "" { - reg, err = repos.LoadRegistry(healthRegistry) - } else { - registryPath, findErr := repos.FindRegistry() - if findErr != nil { - return errors.E("qa.health", i18n.T("error.registry_not_found"), nil) - } - reg, err = repos.LoadRegistry(registryPath) - } - if err != nil { - return errors.E("qa.health", "failed to load registry", err) - } - - // Fetch CI status from all repos - var healthResults []RepoHealth - repoList := reg.List() - - for i, repo := range repoList { - cli.Print("\033[2K\r%s %d/%d %s", - dimStyle.Render(i18n.T("cmd.qa.issues.fetching")), - i+1, len(repoList), repo.Name) - - health := fetchRepoHealth(reg.Org, repo.Name) - healthResults = append(healthResults, health) - } - cli.Print("\033[2K\r") // Clear progress - - // Sort: problems first, then passing - sort.Slice(healthResults, func(i, j int) bool { - return healthPriority(healthResults[i].Status) < healthPriority(healthResults[j].Status) - }) - - // Filter if --problems flag - if healthProblems { - var problems []RepoHealth - for _, h := range healthResults { - if h.Status != "passing" { - problems = append(problems, h) - } - } - healthResults = problems - } - - // Calculate summary - passing := 0 - for _, h := range healthResults { - if h.Status 
== "passing" { - passing++ - } - } - total := len(repoList) - percentage := 0 - if total > 0 { - percentage = (passing * 100) / total - } - - // Print summary - cli.Print("%s: %d/%d repos healthy (%d%%)\n\n", - i18n.T("cmd.qa.health.summary"), - passing, total, percentage) - - if len(healthResults) == 0 { - cli.Text(i18n.T("cmd.qa.health.all_healthy")) - return nil - } - - // Group by status - grouped := make(map[string][]RepoHealth) - for _, h := range healthResults { - grouped[h.Status] = append(grouped[h.Status], h) - } - - // Print problems first - printHealthGroup("failing", grouped["failing"], errorStyle) - printHealthGroup("pending", grouped["pending"], warningStyle) - printHealthGroup("no_ci", grouped["no_ci"], dimStyle) - printHealthGroup("disabled", grouped["disabled"], dimStyle) - - if !healthProblems { - printHealthGroup("passing", grouped["passing"], successStyle) - } - - return nil -} - -func fetchRepoHealth(org, repoName string) RepoHealth { - repoFullName := cli.Sprintf("%s/%s", org, repoName) - - args := []string{ - "run", "list", - "--repo", repoFullName, - "--limit", "1", - "--json", "status,conclusion,name,headSha,updatedAt,url", - } - - cmd := exec.Command("gh", args...) 
- output, err := cmd.Output() - if err != nil { - // Check if it's a 404 (no workflows) - if exitErr, ok := err.(*exec.ExitError); ok { - stderr := string(exitErr.Stderr) - if strings.Contains(stderr, "no workflows") || strings.Contains(stderr, "not found") { - return RepoHealth{ - Name: repoName, - Status: "no_ci", - Message: i18n.T("cmd.qa.health.no_ci_configured"), - } - } - } - return RepoHealth{ - Name: repoName, - Status: "no_ci", - Message: i18n.T("cmd.qa.health.fetch_error"), - } - } - - var runs []HealthWorkflowRun - if err := json.Unmarshal(output, &runs); err != nil { - return RepoHealth{ - Name: repoName, - Status: "no_ci", - Message: i18n.T("cmd.qa.health.parse_error"), - } - } - - if len(runs) == 0 { - return RepoHealth{ - Name: repoName, - Status: "no_ci", - Message: i18n.T("cmd.qa.health.no_ci_configured"), - } - } - - run := runs[0] - health := RepoHealth{ - Name: repoName, - URL: run.URL, - } - - switch run.Status { - case "completed": - switch run.Conclusion { - case "success": - health.Status = "passing" - health.Message = i18n.T("cmd.qa.health.passing") - case "failure": - health.Status = "failing" - health.Message = i18n.T("cmd.qa.health.tests_failing") - case "cancelled": - health.Status = "pending" - health.Message = i18n.T("cmd.qa.health.cancelled") - case "skipped": - health.Status = "passing" - health.Message = i18n.T("cmd.qa.health.skipped") - default: - health.Status = "failing" - health.Message = run.Conclusion - } - case "in_progress", "queued", "waiting": - health.Status = "pending" - health.Message = i18n.T("cmd.qa.health.running") - default: - health.Status = "no_ci" - health.Message = run.Status - } - - return health -} - -func healthPriority(status string) int { - switch status { - case "failing": - return 0 - case "pending": - return 1 - case "no_ci": - return 2 - case "disabled": - return 3 - case "passing": - return 4 - default: - return 5 - } -} - -func printHealthGroup(status string, repos []RepoHealth, style *cli.AnsiStyle) 
{ - if len(repos) == 0 { - return - } - - var label string - switch status { - case "failing": - label = i18n.T("cmd.qa.health.count_failing") - case "pending": - label = i18n.T("cmd.qa.health.count_pending") - case "no_ci": - label = i18n.T("cmd.qa.health.count_no_ci") - case "disabled": - label = i18n.T("cmd.qa.health.count_disabled") - case "passing": - label = i18n.T("cmd.qa.health.count_passing") - } - - cli.Print("%s (%d):\n", style.Render(label), len(repos)) - for _, repo := range repos { - cli.Print(" %s %s\n", - cli.RepoStyle.Render(repo.Name), - dimStyle.Render(repo.Message)) - if repo.URL != "" && status == "failing" { - cli.Print(" -> %s\n", dimStyle.Render(repo.URL)) - } - } - cli.Blank() -} diff --git a/pkg/qa/cmd_issues.go b/pkg/qa/cmd_issues.go deleted file mode 100644 index d243fc0..0000000 --- a/pkg/qa/cmd_issues.go +++ /dev/null @@ -1,400 +0,0 @@ -// cmd_issues.go implements the 'qa issues' command for intelligent issue triage. -// -// Usage: -// core qa issues # Show prioritised, actionable issues -// core qa issues --mine # Show issues assigned to you -// core qa issues --triage # Show issues needing triage (no labels/assignee) -// core qa issues --blocked # Show blocked issues - -package qa - -import ( - "encoding/json" - "os/exec" - "sort" - "strings" - "time" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/errors" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/repos" -) - -// Issue command flags -var ( - issuesMine bool - issuesTriage bool - issuesBlocked bool - issuesRegistry string - issuesLimit int -) - -// Issue represents a GitHub issue with triage metadata -type Issue struct { - Number int `json:"number"` - Title string `json:"title"` - State string `json:"state"` - Body string `json:"body"` - CreatedAt time.Time `json:"createdAt"` - UpdatedAt time.Time `json:"updatedAt"` - Author struct { - Login string `json:"login"` - } `json:"author"` - Assignees struct { - Nodes []struct { - Login string 
`json:"login"` - } `json:"nodes"` - } `json:"assignees"` - Labels struct { - Nodes []struct { - Name string `json:"name"` - } `json:"nodes"` - } `json:"labels"` - Comments struct { - TotalCount int `json:"totalCount"` - Nodes []struct { - Author struct { - Login string `json:"login"` - } `json:"author"` - CreatedAt time.Time `json:"createdAt"` - } `json:"nodes"` - } `json:"comments"` - URL string `json:"url"` - - // Computed fields - RepoName string - Priority int // Lower = higher priority - Category string // "needs_response", "ready", "blocked", "triage" - ActionHint string -} - -// addIssuesCommand adds the 'issues' subcommand to qa. -func addIssuesCommand(parent *cli.Command) { - issuesCmd := &cli.Command{ - Use: "issues", - Short: i18n.T("cmd.qa.issues.short"), - Long: i18n.T("cmd.qa.issues.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runQAIssues() - }, - } - - issuesCmd.Flags().BoolVarP(&issuesMine, "mine", "m", false, i18n.T("cmd.qa.issues.flag.mine")) - issuesCmd.Flags().BoolVarP(&issuesTriage, "triage", "t", false, i18n.T("cmd.qa.issues.flag.triage")) - issuesCmd.Flags().BoolVarP(&issuesBlocked, "blocked", "b", false, i18n.T("cmd.qa.issues.flag.blocked")) - issuesCmd.Flags().StringVar(&issuesRegistry, "registry", "", i18n.T("common.flag.registry")) - issuesCmd.Flags().IntVarP(&issuesLimit, "limit", "l", 50, i18n.T("cmd.qa.issues.flag.limit")) - - parent.AddCommand(issuesCmd) -} - -func runQAIssues() error { - // Check gh is available - if _, err := exec.LookPath("gh"); err != nil { - return errors.E("qa.issues", i18n.T("error.gh_not_found"), nil) - } - - // Load registry - var reg *repos.Registry - var err error - - if issuesRegistry != "" { - reg, err = repos.LoadRegistry(issuesRegistry) - } else { - registryPath, findErr := repos.FindRegistry() - if findErr != nil { - return errors.E("qa.issues", i18n.T("error.registry_not_found"), nil) - } - reg, err = repos.LoadRegistry(registryPath) - } - if err != nil { - return 
errors.E("qa.issues", "failed to load registry", err) - } - - // Fetch issues from all repos - var allIssues []Issue - repoList := reg.List() - - for i, repo := range repoList { - cli.Print("\033[2K\r%s %d/%d %s", - dimStyle.Render(i18n.T("cmd.qa.issues.fetching")), - i+1, len(repoList), repo.Name) - - issues, err := fetchQAIssues(reg.Org, repo.Name, issuesLimit) - if err != nil { - continue // Skip repos with errors - } - allIssues = append(allIssues, issues...) - } - cli.Print("\033[2K\r") // Clear progress - - if len(allIssues) == 0 { - cli.Text(i18n.T("cmd.qa.issues.no_issues")) - return nil - } - - // Categorise and prioritise issues - categorised := categoriseIssues(allIssues) - - // Filter based on flags - if issuesMine { - categorised = filterMine(categorised) - } - if issuesTriage { - categorised = filterCategory(categorised, "triage") - } - if issuesBlocked { - categorised = filterCategory(categorised, "blocked") - } - - // Print categorised issues - printCategorisedIssues(categorised) - - return nil -} - -func fetchQAIssues(org, repoName string, limit int) ([]Issue, error) { - repoFullName := cli.Sprintf("%s/%s", org, repoName) - - args := []string{ - "issue", "list", - "--repo", repoFullName, - "--state", "open", - "--limit", cli.Sprintf("%d", limit), - "--json", "number,title,state,body,createdAt,updatedAt,author,assignees,labels,comments,url", - } - - cmd := exec.Command("gh", args...) 
- output, err := cmd.Output() - if err != nil { - return nil, err - } - - var issues []Issue - if err := json.Unmarshal(output, &issues); err != nil { - return nil, err - } - - // Tag with repo name - for i := range issues { - issues[i].RepoName = repoName - } - - return issues, nil -} - -func categoriseIssues(issues []Issue) map[string][]Issue { - result := map[string][]Issue{ - "needs_response": {}, - "ready": {}, - "blocked": {}, - "triage": {}, - } - - currentUser := getCurrentUser() - - for i := range issues { - issue := &issues[i] - categoriseIssue(issue, currentUser) - result[issue.Category] = append(result[issue.Category], *issue) - } - - // Sort each category by priority - for cat := range result { - sort.Slice(result[cat], func(i, j int) bool { - return result[cat][i].Priority < result[cat][j].Priority - }) - } - - return result -} - -func categoriseIssue(issue *Issue, currentUser string) { - labels := getLabels(issue) - - // Check if blocked - for _, l := range labels { - if strings.HasPrefix(l, "blocked") || l == "waiting" { - issue.Category = "blocked" - issue.Priority = 30 - issue.ActionHint = i18n.T("cmd.qa.issues.hint.blocked") - return - } - } - - // Check if needs triage (no labels, no assignee) - if len(issue.Labels.Nodes) == 0 && len(issue.Assignees.Nodes) == 0 { - issue.Category = "triage" - issue.Priority = 20 - issue.ActionHint = i18n.T("cmd.qa.issues.hint.triage") - return - } - - // Check if needs response (recent comment from someone else) - if issue.Comments.TotalCount > 0 && len(issue.Comments.Nodes) > 0 { - lastComment := issue.Comments.Nodes[len(issue.Comments.Nodes)-1] - // If last comment is not from current user and is recent - if lastComment.Author.Login != currentUser { - age := time.Since(lastComment.CreatedAt) - if age < 48*time.Hour { - issue.Category = "needs_response" - issue.Priority = 10 - issue.ActionHint = cli.Sprintf("@%s %s", lastComment.Author.Login, i18n.T("cmd.qa.issues.hint.needs_response")) - return - } - } - } - - 
// Default: ready to work - issue.Category = "ready" - issue.Priority = calculatePriority(issue, labels) - issue.ActionHint = "" -} - -func calculatePriority(issue *Issue, labels []string) int { - priority := 50 - - // Priority labels - for _, l := range labels { - switch { - case strings.Contains(l, "critical") || strings.Contains(l, "urgent"): - priority = 1 - case strings.Contains(l, "high"): - priority = 10 - case strings.Contains(l, "medium"): - priority = 30 - case strings.Contains(l, "low"): - priority = 70 - case l == "good-first-issue" || l == "good first issue": - priority = min(priority, 15) // Boost good first issues - case l == "help-wanted" || l == "help wanted": - priority = min(priority, 20) - case l == "agent:ready" || l == "agentic": - priority = min(priority, 5) // AI-ready issues are high priority - } - } - - return priority -} - -func getLabels(issue *Issue) []string { - var labels []string - for _, l := range issue.Labels.Nodes { - labels = append(labels, strings.ToLower(l.Name)) - } - return labels -} - -func getCurrentUser() string { - cmd := exec.Command("gh", "api", "user", "--jq", ".login") - output, err := cmd.Output() - if err != nil { - return "" - } - return strings.TrimSpace(string(output)) -} - -func filterMine(categorised map[string][]Issue) map[string][]Issue { - currentUser := getCurrentUser() - result := make(map[string][]Issue) - - for cat, issues := range categorised { - var filtered []Issue - for _, issue := range issues { - for _, a := range issue.Assignees.Nodes { - if a.Login == currentUser { - filtered = append(filtered, issue) - break - } - } - } - if len(filtered) > 0 { - result[cat] = filtered - } - } - - return result -} - -func filterCategory(categorised map[string][]Issue, category string) map[string][]Issue { - if issues, ok := categorised[category]; ok && len(issues) > 0 { - return map[string][]Issue{category: issues} - } - return map[string][]Issue{} -} - -func printCategorisedIssues(categorised 
map[string][]Issue) { - // Print in order: needs_response, ready, blocked, triage - categories := []struct { - key string - title string - style *cli.AnsiStyle - }{ - {"needs_response", i18n.T("cmd.qa.issues.category.needs_response"), warningStyle}, - {"ready", i18n.T("cmd.qa.issues.category.ready"), successStyle}, - {"blocked", i18n.T("cmd.qa.issues.category.blocked"), errorStyle}, - {"triage", i18n.T("cmd.qa.issues.category.triage"), dimStyle}, - } - - first := true - for _, cat := range categories { - issues := categorised[cat.key] - if len(issues) == 0 { - continue - } - - if !first { - cli.Blank() - } - first = false - - cli.Print("%s (%d):\n", cat.style.Render(cat.title), len(issues)) - - for _, issue := range issues { - printTriagedIssue(issue) - } - } - - if first { - cli.Text(i18n.T("cmd.qa.issues.no_issues")) - } -} - -func printTriagedIssue(issue Issue) { - // #42 [core-bio] Fix avatar upload - num := cli.TitleStyle.Render(cli.Sprintf("#%d", issue.Number)) - repo := dimStyle.Render(cli.Sprintf("[%s]", issue.RepoName)) - title := cli.ValueStyle.Render(truncate(issue.Title, 50)) - - cli.Print(" %s %s %s", num, repo, title) - - // Add labels if priority-related - var importantLabels []string - for _, l := range issue.Labels.Nodes { - name := strings.ToLower(l.Name) - if strings.Contains(name, "priority") || strings.Contains(name, "critical") || - name == "good-first-issue" || name == "agent:ready" || name == "agentic" { - importantLabels = append(importantLabels, l.Name) - } - } - if len(importantLabels) > 0 { - cli.Print(" %s", warningStyle.Render("["+strings.Join(importantLabels, ", ")+"]")) - } - - // Add age - age := cli.FormatAge(issue.UpdatedAt) - cli.Print(" %s\n", dimStyle.Render(age)) - - // Add action hint if present - if issue.ActionHint != "" { - cli.Print(" %s %s\n", dimStyle.Render("->"), issue.ActionHint) - } -} - -func min(a, b int) int { - if a < b { - return a - } - return b -} diff --git a/pkg/qa/cmd_qa.go b/pkg/qa/cmd_qa.go deleted file 
mode 100644 index 9d69911..0000000 --- a/pkg/qa/cmd_qa.go +++ /dev/null @@ -1,44 +0,0 @@ -// Package qa provides quality assurance workflow commands. -// -// Unlike `core dev` which is about doing work (commit, push, pull), -// `core qa` is about verifying work (CI status, reviews, issues). -// -// Commands: -// - watch: Monitor GitHub Actions after a push, report actionable data -// - review: PR review status with actionable next steps -// - health: Aggregate CI health across all repos -// - issues: Intelligent issue triage -package qa - -import ( - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -func init() { - cli.RegisterCommands(AddQACommands) -} - -// Style aliases from shared package -var ( - successStyle = cli.SuccessStyle - errorStyle = cli.ErrorStyle - warningStyle = cli.WarningStyle - dimStyle = cli.DimStyle -) - -// AddQACommands registers the 'qa' command and all subcommands. -func AddQACommands(root *cli.Command) { - qaCmd := &cli.Command{ - Use: "qa", - Short: i18n.T("cmd.qa.short"), - Long: i18n.T("cmd.qa.long"), - } - root.AddCommand(qaCmd) - - // Subcommands - addWatchCommand(qaCmd) - addReviewCommand(qaCmd) - addHealthCommand(qaCmd) - addIssuesCommand(qaCmd) -} diff --git a/pkg/qa/cmd_review.go b/pkg/qa/cmd_review.go deleted file mode 100644 index 3094585..0000000 --- a/pkg/qa/cmd_review.go +++ /dev/null @@ -1,322 +0,0 @@ -// cmd_review.go implements the 'qa review' command for PR review status. 
-// -// Usage: -// core qa review # Show all PRs needing attention -// core qa review --mine # Show status of your open PRs -// core qa review --requested # Show PRs you need to review - -package qa - -import ( - "context" - "encoding/json" - "fmt" - "os/exec" - "strings" - "time" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/errors" - "github.com/host-uk/core/pkg/i18n" -) - -// Review command flags -var ( - reviewMine bool - reviewRequested bool - reviewRepo string -) - -// PullRequest represents a GitHub pull request -type PullRequest struct { - Number int `json:"number"` - Title string `json:"title"` - Author Author `json:"author"` - State string `json:"state"` - IsDraft bool `json:"isDraft"` - Mergeable string `json:"mergeable"` - ReviewDecision string `json:"reviewDecision"` - URL string `json:"url"` - HeadRefName string `json:"headRefName"` - CreatedAt time.Time `json:"createdAt"` - UpdatedAt time.Time `json:"updatedAt"` - Additions int `json:"additions"` - Deletions int `json:"deletions"` - ChangedFiles int `json:"changedFiles"` - StatusChecks *StatusCheckRollup `json:"statusCheckRollup"` - ReviewRequests ReviewRequests `json:"reviewRequests"` - Reviews []Review `json:"reviews"` -} - -// Author represents a GitHub user -type Author struct { - Login string `json:"login"` -} - -// StatusCheckRollup contains CI check status -type StatusCheckRollup struct { - Contexts []StatusContext `json:"contexts"` -} - -// StatusContext represents a single check -type StatusContext struct { - State string `json:"state"` - Conclusion string `json:"conclusion"` - Name string `json:"name"` -} - -// ReviewRequests contains pending review requests -type ReviewRequests struct { - Nodes []ReviewRequest `json:"nodes"` -} - -// ReviewRequest represents a review request -type ReviewRequest struct { - RequestedReviewer Author `json:"requestedReviewer"` -} - -// Review represents a PR review -type Review struct { - Author Author `json:"author"` - State string 
`json:"state"` -} - -// addReviewCommand adds the 'review' subcommand to the qa command. -func addReviewCommand(parent *cli.Command) { - reviewCmd := &cli.Command{ - Use: "review", - Short: i18n.T("cmd.qa.review.short"), - Long: i18n.T("cmd.qa.review.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runReview() - }, - } - - reviewCmd.Flags().BoolVarP(&reviewMine, "mine", "m", false, i18n.T("cmd.qa.review.flag.mine")) - reviewCmd.Flags().BoolVarP(&reviewRequested, "requested", "r", false, i18n.T("cmd.qa.review.flag.requested")) - reviewCmd.Flags().StringVar(&reviewRepo, "repo", "", i18n.T("cmd.qa.review.flag.repo")) - - parent.AddCommand(reviewCmd) -} - -func runReview() error { - // Check gh is available - if _, err := exec.LookPath("gh"); err != nil { - return errors.E("qa.review", i18n.T("error.gh_not_found"), nil) - } - - ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) - defer cancel() - - // Determine repo - repoFullName := reviewRepo - if repoFullName == "" { - var err error - repoFullName, err = detectRepoFromGit() - if err != nil { - return errors.E("qa.review", i18n.T("cmd.qa.review.error.no_repo"), nil) - } - } - - // Default: show both mine and requested if neither flag is set - showMine := reviewMine || (!reviewMine && !reviewRequested) - showRequested := reviewRequested || (!reviewMine && !reviewRequested) - - if showMine { - if err := showMyPRs(ctx, repoFullName); err != nil { - return err - } - } - - if showRequested { - if showMine { - cli.Blank() - } - if err := showRequestedReviews(ctx, repoFullName); err != nil { - return err - } - } - - return nil -} - -// showMyPRs shows the user's open PRs with status -func showMyPRs(ctx context.Context, repo string) error { - prs, err := fetchPRs(ctx, repo, "author:@me") - if err != nil { - return errors.E("qa.review", "failed to fetch your PRs", err) - } - - if len(prs) == 0 { - cli.Print("%s\n", dimStyle.Render(i18n.T("cmd.qa.review.no_prs"))) - return nil - } - 
- cli.Print("%s (%d):\n", i18n.T("cmd.qa.review.your_prs"), len(prs)) - - for _, pr := range prs { - printPRStatus(pr) - } - - return nil -} - -// showRequestedReviews shows PRs where user's review is requested -func showRequestedReviews(ctx context.Context, repo string) error { - prs, err := fetchPRs(ctx, repo, "review-requested:@me") - if err != nil { - return errors.E("qa.review", "failed to fetch review requests", err) - } - - if len(prs) == 0 { - cli.Print("%s\n", dimStyle.Render(i18n.T("cmd.qa.review.no_reviews"))) - return nil - } - - cli.Print("%s (%d):\n", i18n.T("cmd.qa.review.review_requested"), len(prs)) - - for _, pr := range prs { - printPRForReview(pr) - } - - return nil -} - -// fetchPRs fetches PRs matching the search query -func fetchPRs(ctx context.Context, repo, search string) ([]PullRequest, error) { - args := []string{ - "pr", "list", - "--state", "open", - "--search", search, - "--json", "number,title,author,state,isDraft,mergeable,reviewDecision,url,headRefName,createdAt,updatedAt,additions,deletions,changedFiles,statusCheckRollup,reviewRequests,reviews", - } - - if repo != "" { - args = append(args, "--repo", repo) - } - - cmd := exec.CommandContext(ctx, "gh", args...) 
- output, err := cmd.Output() - if err != nil { - if exitErr, ok := err.(*exec.ExitError); ok { - return nil, fmt.Errorf("%s", strings.TrimSpace(string(exitErr.Stderr))) - } - return nil, err - } - - var prs []PullRequest - if err := json.Unmarshal(output, &prs); err != nil { - return nil, err - } - - return prs, nil -} - -// printPRStatus prints a PR with its merge status -func printPRStatus(pr PullRequest) { - // Determine status icon and color - status, style, action := analyzePRStatus(pr) - - cli.Print(" %s #%d %s\n", - style.Render(status), - pr.Number, - truncate(pr.Title, 50)) - - if action != "" { - cli.Print(" %s %s\n", dimStyle.Render("->"), action) - } -} - -// printPRForReview prints a PR that needs review -func printPRForReview(pr PullRequest) { - // Show PR info with stats - stats := fmt.Sprintf("+%d/-%d, %d files", - pr.Additions, pr.Deletions, pr.ChangedFiles) - - cli.Print(" %s #%d %s\n", - warningStyle.Render("◯"), - pr.Number, - truncate(pr.Title, 50)) - cli.Print(" %s @%s, %s\n", - dimStyle.Render("->"), - pr.Author.Login, - stats) - cli.Print(" %s gh pr checkout %d\n", - dimStyle.Render("->"), - pr.Number) -} - -// analyzePRStatus determines the status, style, and action for a PR -func analyzePRStatus(pr PullRequest) (status string, style *cli.AnsiStyle, action string) { - // Check if draft - if pr.IsDraft { - return "◯", dimStyle, "Draft - convert to ready when done" - } - - // Check CI status - ciPassed := true - ciFailed := false - ciPending := false - var failedCheck string - - if pr.StatusChecks != nil { - for _, check := range pr.StatusChecks.Contexts { - switch check.Conclusion { - case "FAILURE", "failure": - ciFailed = true - ciPassed = false - if failedCheck == "" { - failedCheck = check.Name - } - case "PENDING", "pending", "": - if check.State == "PENDING" || check.State == "" { - ciPending = true - ciPassed = false - } - } - } - } - - // Check review status - approved := pr.ReviewDecision == "APPROVED" - changesRequested := 
pr.ReviewDecision == "CHANGES_REQUESTED" - - // Check mergeable status - hasConflicts := pr.Mergeable == "CONFLICTING" - - // Determine overall status - if hasConflicts { - return "✗", errorStyle, "Needs rebase - has merge conflicts" - } - - if ciFailed { - return "✗", errorStyle, fmt.Sprintf("CI failed: %s", failedCheck) - } - - if changesRequested { - return "✗", warningStyle, "Changes requested - address review feedback" - } - - if ciPending { - return "◯", warningStyle, "CI running..." - } - - if !approved && pr.ReviewDecision != "" { - return "◯", warningStyle, "Awaiting review" - } - - if approved && ciPassed { - return "✓", successStyle, "Ready to merge" - } - - return "◯", dimStyle, "" -} - -// truncate shortens a string to max length (rune-safe for UTF-8) -func truncate(s string, max int) string { - runes := []rune(s) - if len(runes) <= max { - return s - } - return string(runes[:max-3]) + "..." -} diff --git a/pkg/qa/cmd_watch.go b/pkg/qa/cmd_watch.go deleted file mode 100644 index 2db17fe..0000000 --- a/pkg/qa/cmd_watch.go +++ /dev/null @@ -1,444 +0,0 @@ -// cmd_watch.go implements the 'qa watch' command for monitoring GitHub Actions. 
-// -// Usage: -// core qa watch # Watch current repo's latest push -// core qa watch --repo X # Watch specific repo -// core qa watch --commit SHA # Watch specific commit -// core qa watch --timeout 5m # Custom timeout (default: 10m) - -package qa - -import ( - "context" - "encoding/json" - "fmt" - "os/exec" - "strings" - "time" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/errors" - "github.com/host-uk/core/pkg/i18n" -) - -// Watch command flags -var ( - watchRepo string - watchCommit string - watchTimeout time.Duration -) - -// WorkflowRun represents a GitHub Actions workflow run -type WorkflowRun struct { - ID int64 `json:"databaseId"` - Name string `json:"name"` - DisplayTitle string `json:"displayTitle"` - Status string `json:"status"` - Conclusion string `json:"conclusion"` - HeadSha string `json:"headSha"` - URL string `json:"url"` - CreatedAt time.Time `json:"createdAt"` - UpdatedAt time.Time `json:"updatedAt"` -} - -// WorkflowJob represents a job within a workflow run -type WorkflowJob struct { - ID int64 `json:"databaseId"` - Name string `json:"name"` - Status string `json:"status"` - Conclusion string `json:"conclusion"` - URL string `json:"url"` -} - -// JobStep represents a step within a job -type JobStep struct { - Name string `json:"name"` - Status string `json:"status"` - Conclusion string `json:"conclusion"` - Number int `json:"number"` -} - -// addWatchCommand adds the 'watch' subcommand to the qa command. 
-func addWatchCommand(parent *cli.Command) { - watchCmd := &cli.Command{ - Use: "watch", - Short: i18n.T("cmd.qa.watch.short"), - Long: i18n.T("cmd.qa.watch.long"), - RunE: func(cmd *cli.Command, args []string) error { - return runWatch() - }, - } - - watchCmd.Flags().StringVarP(&watchRepo, "repo", "r", "", i18n.T("cmd.qa.watch.flag.repo")) - watchCmd.Flags().StringVarP(&watchCommit, "commit", "c", "", i18n.T("cmd.qa.watch.flag.commit")) - watchCmd.Flags().DurationVarP(&watchTimeout, "timeout", "t", 10*time.Minute, i18n.T("cmd.qa.watch.flag.timeout")) - - parent.AddCommand(watchCmd) -} - -func runWatch() error { - // Check gh is available - if _, err := exec.LookPath("gh"); err != nil { - return errors.E("qa.watch", i18n.T("error.gh_not_found"), nil) - } - - // Determine repo - repoFullName, err := resolveRepo(watchRepo) - if err != nil { - return err - } - - // Determine commit - commitSha, err := resolveCommit(watchCommit) - if err != nil { - return err - } - - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("repo")), repoFullName) - // Safe prefix for display - handle short SHAs gracefully - shaPrefix := commitSha - if len(commitSha) > 8 { - shaPrefix = commitSha[:8] - } - cli.Print("%s %s\n", dimStyle.Render(i18n.T("cmd.qa.watch.commit")), shaPrefix) - cli.Blank() - - // Create context with timeout for all gh commands - ctx, cancel := context.WithTimeout(context.Background(), watchTimeout) - defer cancel() - - // Poll for workflow runs - pollInterval := 3 * time.Second - var lastStatus string - - for { - // Check if context deadline exceeded - if ctx.Err() != nil { - cli.Blank() - return errors.E("qa.watch", i18n.T("cmd.qa.watch.timeout", map[string]interface{}{"Duration": watchTimeout}), nil) - } - - runs, err := fetchWorkflowRunsForCommit(ctx, repoFullName, commitSha) - if err != nil { - return errors.Wrap(err, "qa.watch", "failed to fetch workflow runs") - } - - if len(runs) == 0 { - // No workflows triggered yet, keep waiting - cli.Print("\033[2K\r%s", 
dimStyle.Render(i18n.T("cmd.qa.watch.waiting_for_workflows"))) - time.Sleep(pollInterval) - continue - } - - // Check status of all runs - allComplete := true - var pending, success, failed int - for _, run := range runs { - switch run.Status { - case "completed": - if run.Conclusion == "success" { - success++ - } else { - // Count all non-success conclusions as failed - // (failure, cancelled, timed_out, action_required, stale, etc.) - failed++ - } - default: - allComplete = false - pending++ - } - } - - // Build status line - status := fmt.Sprintf("%d workflow(s): ", len(runs)) - if pending > 0 { - status += warningStyle.Render(fmt.Sprintf("%d running", pending)) - if success > 0 || failed > 0 { - status += ", " - } - } - if success > 0 { - status += successStyle.Render(fmt.Sprintf("%d passed", success)) - if failed > 0 { - status += ", " - } - } - if failed > 0 { - status += errorStyle.Render(fmt.Sprintf("%d failed", failed)) - } - - // Only print if status changed - if status != lastStatus { - cli.Print("\033[2K\r%s", status) - lastStatus = status - } - - if allComplete { - cli.Blank() - cli.Blank() - return printResults(ctx, repoFullName, runs) - } - - time.Sleep(pollInterval) - } -} - -// resolveRepo determines the repo to watch -func resolveRepo(specified string) (string, error) { - if specified != "" { - // If it contains /, assume it's already full name - if strings.Contains(specified, "/") { - return specified, nil - } - // Try to get org from current directory - org := detectOrgFromGit() - if org != "" { - return org + "/" + specified, nil - } - return "", errors.E("qa.watch", i18n.T("cmd.qa.watch.error.repo_format"), nil) - } - - // Detect from current directory - return detectRepoFromGit() -} - -// resolveCommit determines the commit to watch -func resolveCommit(specified string) (string, error) { - if specified != "" { - return specified, nil - } - - // Get HEAD commit - cmd := exec.Command("git", "rev-parse", "HEAD") - output, err := cmd.Output() - 
if err != nil { - return "", errors.Wrap(err, "qa.watch", "failed to get HEAD commit") - } - - return strings.TrimSpace(string(output)), nil -} - -// detectRepoFromGit detects the repo from git remote -func detectRepoFromGit() (string, error) { - cmd := exec.Command("git", "remote", "get-url", "origin") - output, err := cmd.Output() - if err != nil { - return "", errors.E("qa.watch", i18n.T("cmd.qa.watch.error.not_git_repo"), nil) - } - - url := strings.TrimSpace(string(output)) - return parseGitHubRepo(url) -} - -// detectOrgFromGit tries to detect the org from git remote -func detectOrgFromGit() string { - repo, err := detectRepoFromGit() - if err != nil { - return "" - } - parts := strings.Split(repo, "/") - if len(parts) >= 1 { - return parts[0] - } - return "" -} - -// parseGitHubRepo extracts org/repo from a git URL -func parseGitHubRepo(url string) (string, error) { - // Handle SSH URLs: git@github.com:org/repo.git - if strings.HasPrefix(url, "git@github.com:") { - path := strings.TrimPrefix(url, "git@github.com:") - path = strings.TrimSuffix(path, ".git") - return path, nil - } - - // Handle HTTPS URLs: https://github.com/org/repo.git - if strings.Contains(url, "github.com/") { - parts := strings.Split(url, "github.com/") - if len(parts) >= 2 { - path := strings.TrimSuffix(parts[1], ".git") - return path, nil - } - } - - return "", fmt.Errorf("could not parse GitHub repo from URL: %s", url) -} - -// fetchWorkflowRunsForCommit fetches workflow runs for a specific commit -func fetchWorkflowRunsForCommit(ctx context.Context, repoFullName, commitSha string) ([]WorkflowRun, error) { - args := []string{ - "run", "list", - "--repo", repoFullName, - "--commit", commitSha, - "--json", "databaseId,name,displayTitle,status,conclusion,headSha,url,createdAt,updatedAt", - } - - cmd := exec.CommandContext(ctx, "gh", args...) 
- output, err := cmd.Output() - if err != nil { - // Check if context was cancelled/deadline exceeded - if ctx.Err() != nil { - return nil, ctx.Err() - } - if exitErr, ok := err.(*exec.ExitError); ok { - return nil, cli.Err("%s", strings.TrimSpace(string(exitErr.Stderr))) - } - return nil, err - } - - var runs []WorkflowRun - if err := json.Unmarshal(output, &runs); err != nil { - return nil, err - } - - return runs, nil -} - -// printResults prints the final results with actionable information -func printResults(ctx context.Context, repoFullName string, runs []WorkflowRun) error { - var failures []WorkflowRun - var successes []WorkflowRun - - for _, run := range runs { - if run.Conclusion == "success" { - successes = append(successes, run) - } else { - // Treat all non-success as failures (failure, cancelled, timed_out, etc.) - failures = append(failures, run) - } - } - - // Print successes briefly - for _, run := range successes { - cli.Print("%s %s\n", successStyle.Render(cli.Glyph(":check:")), run.Name) - } - - // Print failures with details - for _, run := range failures { - cli.Print("%s %s\n", errorStyle.Render(cli.Glyph(":cross:")), run.Name) - - // Fetch failed job details - failedJob, failedStep, errorLine := fetchFailureDetails(ctx, repoFullName, run.ID) - if failedJob != "" { - cli.Print(" %s Job: %s", dimStyle.Render("->"), failedJob) - if failedStep != "" { - cli.Print(" (step: %s)", failedStep) - } - cli.Blank() - } - if errorLine != "" { - cli.Print(" %s Error: %s\n", dimStyle.Render("->"), errorLine) - } - cli.Print(" %s %s\n", dimStyle.Render("->"), run.URL) - } - - // Exit with error if any failures - if len(failures) > 0 { - cli.Blank() - return cli.Err("%s", i18n.T("cmd.qa.watch.workflows_failed", map[string]interface{}{"Count": len(failures)})) - } - - cli.Blank() - cli.Print("%s\n", successStyle.Render(i18n.T("cmd.qa.watch.all_passed"))) - return nil -} - -// fetchFailureDetails fetches details about why a workflow failed -func 
fetchFailureDetails(ctx context.Context, repoFullName string, runID int64) (jobName, stepName, errorLine string) { - // Fetch jobs for this run - args := []string{ - "run", "view", fmt.Sprintf("%d", runID), - "--repo", repoFullName, - "--json", "jobs", - } - - cmd := exec.CommandContext(ctx, "gh", args...) - output, err := cmd.Output() - if err != nil { - return "", "", "" - } - - var result struct { - Jobs []struct { - Name string `json:"name"` - Conclusion string `json:"conclusion"` - Steps []struct { - Name string `json:"name"` - Conclusion string `json:"conclusion"` - Number int `json:"number"` - } `json:"steps"` - } `json:"jobs"` - } - - if err := json.Unmarshal(output, &result); err != nil { - return "", "", "" - } - - // Find the failed job and step - for _, job := range result.Jobs { - if job.Conclusion == "failure" { - jobName = job.Name - for _, step := range job.Steps { - if step.Conclusion == "failure" { - stepName = fmt.Sprintf("%d: %s", step.Number, step.Name) - break - } - } - break - } - } - - // Try to get the error line from logs (if available) - errorLine = fetchErrorFromLogs(ctx, repoFullName, runID) - - return jobName, stepName, errorLine -} - -// fetchErrorFromLogs attempts to extract the first error line from workflow logs -func fetchErrorFromLogs(ctx context.Context, repoFullName string, runID int64) string { - // Use gh run view --log-failed to get failed step logs - args := []string{ - "run", "view", fmt.Sprintf("%d", runID), - "--repo", repoFullName, - "--log-failed", - } - - cmd := exec.CommandContext(ctx, "gh", args...) 
- output, err := cmd.Output() - if err != nil { - return "" - } - - // Parse output to find the first meaningful error line - lines := strings.Split(string(output), "\n") - for _, line := range lines { - line = strings.TrimSpace(line) - if line == "" { - continue - } - - // Skip common metadata/progress lines - lower := strings.ToLower(line) - if strings.HasPrefix(lower, "##[") { // GitHub Actions command markers - continue - } - if strings.HasPrefix(line, "Run ") || strings.HasPrefix(line, "Running ") { - continue - } - - // Look for error indicators - if strings.Contains(lower, "error") || - strings.Contains(lower, "failed") || - strings.Contains(lower, "fatal") || - strings.Contains(lower, "panic") || - strings.Contains(line, ": ") { // Likely a file:line or key: value format - // Truncate long lines - if len(line) > 120 { - line = line[:117] + "..." - } - return line - } - } - - return "" -} diff --git a/pkg/release/changelog.go b/pkg/release/changelog.go deleted file mode 100644 index c25fc52..0000000 --- a/pkg/release/changelog.go +++ /dev/null @@ -1,321 +0,0 @@ -// Package release provides release automation with changelog generation and publishing. -package release - -import ( - "bufio" - "bytes" - "fmt" - "os/exec" - "regexp" - "sort" - "strings" - - "golang.org/x/text/cases" - "golang.org/x/text/language" -) - -// ConventionalCommit represents a parsed conventional commit. -type ConventionalCommit struct { - Type string // feat, fix, etc. - Scope string // optional scope in parentheses - Description string // commit description - Hash string // short commit hash - Breaking bool // has breaking change indicator -} - -// commitTypeLabels maps commit types to human-readable labels for the changelog. 
-var commitTypeLabels = map[string]string{ - "feat": "Features", - "fix": "Bug Fixes", - "perf": "Performance Improvements", - "refactor": "Code Refactoring", - "docs": "Documentation", - "style": "Styles", - "test": "Tests", - "build": "Build System", - "ci": "Continuous Integration", - "chore": "Chores", - "revert": "Reverts", -} - -// commitTypeOrder defines the order of sections in the changelog. -var commitTypeOrder = []string{ - "feat", - "fix", - "perf", - "refactor", - "docs", - "style", - "test", - "build", - "ci", - "chore", - "revert", -} - -// conventionalCommitRegex matches conventional commit format. -// Examples: "feat: add feature", "fix(scope): fix bug", "feat!: breaking change" -var conventionalCommitRegex = regexp.MustCompile(`^(\w+)(?:\(([^)]+)\))?(!)?:\s*(.+)$`) - -// Generate generates a markdown changelog from git commits between two refs. -// If fromRef is empty, it uses the previous tag or initial commit. -// If toRef is empty, it uses HEAD. -func Generate(dir, fromRef, toRef string) (string, error) { - if toRef == "" { - toRef = "HEAD" - } - - // If fromRef is empty, try to find previous tag - if fromRef == "" { - prevTag, err := getPreviousTag(dir, toRef) - if err != nil { - // No previous tag, use initial commit - fromRef = "" - } else { - fromRef = prevTag - } - } - - // Get commits between refs - commits, err := getCommits(dir, fromRef, toRef) - if err != nil { - return "", fmt.Errorf("changelog.Generate: failed to get commits: %w", err) - } - - // Parse conventional commits - var parsedCommits []ConventionalCommit - for _, commit := range commits { - parsed := parseConventionalCommit(commit) - if parsed != nil { - parsedCommits = append(parsedCommits, *parsed) - } - } - - // Generate markdown - return formatChangelog(parsedCommits, toRef), nil -} - -// GenerateWithConfig generates a changelog with filtering based on config. 
-func GenerateWithConfig(dir, fromRef, toRef string, cfg *ChangelogConfig) (string, error) { - if toRef == "" { - toRef = "HEAD" - } - - // If fromRef is empty, try to find previous tag - if fromRef == "" { - prevTag, err := getPreviousTag(dir, toRef) - if err != nil { - fromRef = "" - } else { - fromRef = prevTag - } - } - - // Get commits between refs - commits, err := getCommits(dir, fromRef, toRef) - if err != nil { - return "", fmt.Errorf("changelog.GenerateWithConfig: failed to get commits: %w", err) - } - - // Build include/exclude sets - includeSet := make(map[string]bool) - excludeSet := make(map[string]bool) - for _, t := range cfg.Include { - includeSet[t] = true - } - for _, t := range cfg.Exclude { - excludeSet[t] = true - } - - // Parse and filter conventional commits - var parsedCommits []ConventionalCommit - for _, commit := range commits { - parsed := parseConventionalCommit(commit) - if parsed == nil { - continue - } - - // Apply filters - if len(includeSet) > 0 && !includeSet[parsed.Type] { - continue - } - if excludeSet[parsed.Type] { - continue - } - - parsedCommits = append(parsedCommits, *parsed) - } - - return formatChangelog(parsedCommits, toRef), nil -} - -// getPreviousTag returns the tag before the given ref. -func getPreviousTag(dir, ref string) (string, error) { - cmd := exec.Command("git", "describe", "--tags", "--abbrev=0", ref+"^") - cmd.Dir = dir - output, err := cmd.Output() - if err != nil { - return "", err - } - return strings.TrimSpace(string(output)), nil -} - -// getCommits returns a slice of commit strings between two refs. -// Format: "hash subject" -func getCommits(dir, fromRef, toRef string) ([]string, error) { - var args []string - if fromRef == "" { - // All commits up to toRef - args = []string{"log", "--oneline", "--no-merges", toRef} - } else { - // Commits between refs - args = []string{"log", "--oneline", "--no-merges", fromRef + ".." + toRef} - } - - cmd := exec.Command("git", args...) 
- cmd.Dir = dir - output, err := cmd.Output() - if err != nil { - return nil, err - } - - var commits []string - scanner := bufio.NewScanner(bytes.NewReader(output)) - for scanner.Scan() { - line := scanner.Text() - if line != "" { - commits = append(commits, line) - } - } - - return commits, scanner.Err() -} - -// parseConventionalCommit parses a git log --oneline output into a ConventionalCommit. -// Returns nil if the commit doesn't follow conventional commit format. -func parseConventionalCommit(commitLine string) *ConventionalCommit { - // Split hash and subject - parts := strings.SplitN(commitLine, " ", 2) - if len(parts) != 2 { - return nil - } - - hash := parts[0] - subject := parts[1] - - // Match conventional commit format - matches := conventionalCommitRegex.FindStringSubmatch(subject) - if matches == nil { - return nil - } - - return &ConventionalCommit{ - Type: strings.ToLower(matches[1]), - Scope: matches[2], - Breaking: matches[3] == "!", - Description: matches[4], - Hash: hash, - } -} - -// formatChangelog formats parsed commits into markdown. 
-func formatChangelog(commits []ConventionalCommit, version string) string { - if len(commits) == 0 { - return fmt.Sprintf("## %s\n\nNo notable changes.", version) - } - - // Group commits by type - grouped := make(map[string][]ConventionalCommit) - var breaking []ConventionalCommit - - for _, commit := range commits { - if commit.Breaking { - breaking = append(breaking, commit) - } - grouped[commit.Type] = append(grouped[commit.Type], commit) - } - - var buf strings.Builder - buf.WriteString(fmt.Sprintf("## %s\n\n", version)) - - // Breaking changes first - if len(breaking) > 0 { - buf.WriteString("### BREAKING CHANGES\n\n") - for _, commit := range breaking { - buf.WriteString(formatCommitLine(commit)) - } - buf.WriteString("\n") - } - - // Other sections in order - for _, commitType := range commitTypeOrder { - commits, ok := grouped[commitType] - if !ok || len(commits) == 0 { - continue - } - - label, ok := commitTypeLabels[commitType] - if !ok { - label = cases.Title(language.English).String(commitType) - } - - buf.WriteString(fmt.Sprintf("### %s\n\n", label)) - for _, commit := range commits { - buf.WriteString(formatCommitLine(commit)) - } - buf.WriteString("\n") - } - - // Any remaining types not in the order list - var remainingTypes []string - for commitType := range grouped { - found := false - for _, t := range commitTypeOrder { - if t == commitType { - found = true - break - } - } - if !found { - remainingTypes = append(remainingTypes, commitType) - } - } - sort.Strings(remainingTypes) - - for _, commitType := range remainingTypes { - commits := grouped[commitType] - label := cases.Title(language.English).String(commitType) - buf.WriteString(fmt.Sprintf("### %s\n\n", label)) - for _, commit := range commits { - buf.WriteString(formatCommitLine(commit)) - } - buf.WriteString("\n") - } - - return strings.TrimSuffix(buf.String(), "\n") -} - -// formatCommitLine formats a single commit as a changelog line. 
-func formatCommitLine(commit ConventionalCommit) string { - var buf strings.Builder - buf.WriteString("- ") - - if commit.Scope != "" { - buf.WriteString(fmt.Sprintf("**%s**: ", commit.Scope)) - } - - buf.WriteString(commit.Description) - buf.WriteString(fmt.Sprintf(" (%s)\n", commit.Hash)) - - return buf.String() -} - -// ParseCommitType extracts the type from a conventional commit subject. -// Returns empty string if not a conventional commit. -func ParseCommitType(subject string) string { - matches := conventionalCommitRegex.FindStringSubmatch(subject) - if matches == nil { - return "" - } - return strings.ToLower(matches[1]) -} diff --git a/pkg/release/changelog_test.go b/pkg/release/changelog_test.go deleted file mode 100644 index ac7d4de..0000000 --- a/pkg/release/changelog_test.go +++ /dev/null @@ -1,695 +0,0 @@ -package release - -import ( - "os" - "os/exec" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestParseConventionalCommit_Good(t *testing.T) { - tests := []struct { - name string - input string - expected *ConventionalCommit - }{ - { - name: "feat without scope", - input: "abc1234 feat: add new feature", - expected: &ConventionalCommit{ - Type: "feat", - Scope: "", - Description: "add new feature", - Hash: "abc1234", - Breaking: false, - }, - }, - { - name: "fix with scope", - input: "def5678 fix(auth): resolve login issue", - expected: &ConventionalCommit{ - Type: "fix", - Scope: "auth", - Description: "resolve login issue", - Hash: "def5678", - Breaking: false, - }, - }, - { - name: "breaking change with exclamation", - input: "ghi9012 feat!: breaking API change", - expected: &ConventionalCommit{ - Type: "feat", - Scope: "", - Description: "breaking API change", - Hash: "ghi9012", - Breaking: true, - }, - }, - { - name: "breaking change with scope", - input: "jkl3456 fix(api)!: remove deprecated endpoint", - expected: &ConventionalCommit{ - Type: "fix", - Scope: "api", - 
Description: "remove deprecated endpoint", - Hash: "jkl3456", - Breaking: true, - }, - }, - { - name: "perf type", - input: "mno7890 perf: optimize database queries", - expected: &ConventionalCommit{ - Type: "perf", - Scope: "", - Description: "optimize database queries", - Hash: "mno7890", - Breaking: false, - }, - }, - { - name: "chore type", - input: "pqr1234 chore: update dependencies", - expected: &ConventionalCommit{ - Type: "chore", - Scope: "", - Description: "update dependencies", - Hash: "pqr1234", - Breaking: false, - }, - }, - { - name: "uppercase type normalizes to lowercase", - input: "stu5678 FEAT: uppercase type", - expected: &ConventionalCommit{ - Type: "feat", - Scope: "", - Description: "uppercase type", - Hash: "stu5678", - Breaking: false, - }, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - result := parseConventionalCommit(tc.input) - assert.NotNil(t, result) - assert.Equal(t, tc.expected.Type, result.Type) - assert.Equal(t, tc.expected.Scope, result.Scope) - assert.Equal(t, tc.expected.Description, result.Description) - assert.Equal(t, tc.expected.Hash, result.Hash) - assert.Equal(t, tc.expected.Breaking, result.Breaking) - }) - } -} - -func TestParseConventionalCommit_Bad(t *testing.T) { - tests := []struct { - name string - input string - }{ - { - name: "non-conventional commit", - input: "abc1234 Update README", - }, - { - name: "missing colon", - input: "def5678 feat add feature", - }, - { - name: "empty subject", - input: "ghi9012", - }, - { - name: "just hash", - input: "abc1234", - }, - { - name: "merge commit", - input: "abc1234 Merge pull request #123", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - result := parseConventionalCommit(tc.input) - assert.Nil(t, result) - }) - } -} - -func TestFormatChangelog_Good(t *testing.T) { - t.Run("formats commits by type", func(t *testing.T) { - commits := []ConventionalCommit{ - {Type: "feat", Description: "add feature A", Hash: 
"abc1234"}, - {Type: "fix", Description: "fix bug B", Hash: "def5678"}, - {Type: "feat", Description: "add feature C", Hash: "ghi9012"}, - } - - result := formatChangelog(commits, "v1.0.0") - - assert.Contains(t, result, "## v1.0.0") - assert.Contains(t, result, "### Features") - assert.Contains(t, result, "### Bug Fixes") - assert.Contains(t, result, "- add feature A (abc1234)") - assert.Contains(t, result, "- fix bug B (def5678)") - assert.Contains(t, result, "- add feature C (ghi9012)") - }) - - t.Run("includes scope in output", func(t *testing.T) { - commits := []ConventionalCommit{ - {Type: "feat", Scope: "api", Description: "add endpoint", Hash: "abc1234"}, - } - - result := formatChangelog(commits, "v1.0.0") - - assert.Contains(t, result, "**api**: add endpoint") - }) - - t.Run("breaking changes first", func(t *testing.T) { - commits := []ConventionalCommit{ - {Type: "feat", Description: "normal feature", Hash: "abc1234"}, - {Type: "feat", Description: "breaking feature", Hash: "def5678", Breaking: true}, - } - - result := formatChangelog(commits, "v1.0.0") - - assert.Contains(t, result, "### BREAKING CHANGES") - // Breaking changes section should appear before Features - breakingPos := indexOf(result, "BREAKING CHANGES") - featuresPos := indexOf(result, "Features") - assert.Less(t, breakingPos, featuresPos) - }) - - t.Run("empty commits returns minimal changelog", func(t *testing.T) { - result := formatChangelog([]ConventionalCommit{}, "v1.0.0") - - assert.Contains(t, result, "## v1.0.0") - assert.Contains(t, result, "No notable changes") - }) -} - -func TestParseCommitType_Good(t *testing.T) { - tests := []struct { - input string - expected string - }{ - {"feat: add feature", "feat"}, - {"fix(scope): fix bug", "fix"}, - {"perf!: breaking perf", "perf"}, - {"chore: update deps", "chore"}, - } - - for _, tc := range tests { - t.Run(tc.input, func(t *testing.T) { - result := ParseCommitType(tc.input) - assert.Equal(t, tc.expected, result) - }) - } -} - -func 
TestParseCommitType_Bad(t *testing.T) { - tests := []struct { - input string - }{ - {"not a conventional commit"}, - {"Update README"}, - {"Merge branch 'main'"}, - } - - for _, tc := range tests { - t.Run(tc.input, func(t *testing.T) { - result := ParseCommitType(tc.input) - assert.Empty(t, result) - }) - } -} - -func TestGenerateWithConfig_ConfigValues(t *testing.T) { - t.Run("config filters are parsed correctly", func(t *testing.T) { - cfg := &ChangelogConfig{ - Include: []string{"feat", "fix"}, - Exclude: []string{"chore", "docs"}, - } - - // Verify the config values - assert.Contains(t, cfg.Include, "feat") - assert.Contains(t, cfg.Include, "fix") - assert.Contains(t, cfg.Exclude, "chore") - assert.Contains(t, cfg.Exclude, "docs") - }) -} - -// indexOf returns the position of a substring in a string, or -1 if not found. -func indexOf(s, substr string) int { - for i := 0; i+len(substr) <= len(s); i++ { - if s[i:i+len(substr)] == substr { - return i - } - } - return -1 -} - -// setupChangelogGitRepo creates a temporary directory with an initialized git repository. -func setupChangelogGitRepo(t *testing.T) string { - t.Helper() - dir := t.TempDir() - - // Initialize git repo - cmd := exec.Command("git", "init") - cmd.Dir = dir - require.NoError(t, cmd.Run()) - - // Configure git user for commits - cmd = exec.Command("git", "config", "user.email", "test@example.com") - cmd.Dir = dir - require.NoError(t, cmd.Run()) - - cmd = exec.Command("git", "config", "user.name", "Test User") - cmd.Dir = dir - require.NoError(t, cmd.Run()) - - return dir -} - -// createChangelogCommit creates a commit in the given directory. -func createChangelogCommit(t *testing.T, dir, message string) { - t.Helper() - - // Create or modify a file - filePath := filepath.Join(dir, "changelog_test.txt") - content, _ := os.ReadFile(filePath) - content = append(content, []byte(message+"\n")...) 
- require.NoError(t, os.WriteFile(filePath, content, 0644)) - - // Stage and commit - cmd := exec.Command("git", "add", ".") - cmd.Dir = dir - require.NoError(t, cmd.Run()) - - cmd = exec.Command("git", "commit", "-m", message) - cmd.Dir = dir - require.NoError(t, cmd.Run()) -} - -// createChangelogTag creates a tag in the given directory. -func createChangelogTag(t *testing.T, dir, tag string) { - t.Helper() - cmd := exec.Command("git", "tag", tag) - cmd.Dir = dir - require.NoError(t, cmd.Run()) -} - -func TestGenerate_Good(t *testing.T) { - t.Run("generates changelog from commits", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: add new feature") - createChangelogCommit(t, dir, "fix: resolve bug") - - changelog, err := Generate(dir, "", "HEAD") - require.NoError(t, err) - - assert.Contains(t, changelog, "## HEAD") - assert.Contains(t, changelog, "### Features") - assert.Contains(t, changelog, "add new feature") - assert.Contains(t, changelog, "### Bug Fixes") - assert.Contains(t, changelog, "resolve bug") - }) - - t.Run("generates changelog between tags", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: initial feature") - createChangelogTag(t, dir, "v1.0.0") - createChangelogCommit(t, dir, "feat: new feature") - createChangelogCommit(t, dir, "fix: bug fix") - createChangelogTag(t, dir, "v1.1.0") - - changelog, err := Generate(dir, "v1.0.0", "v1.1.0") - require.NoError(t, err) - - assert.Contains(t, changelog, "## v1.1.0") - assert.Contains(t, changelog, "new feature") - assert.Contains(t, changelog, "bug fix") - // Should NOT contain the initial feature - assert.NotContains(t, changelog, "initial feature") - }) - - t.Run("handles empty changelog when no conventional commits", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "Update README") - createChangelogCommit(t, dir, "Merge branch main") - - changelog, err := Generate(dir, "", 
"HEAD") - require.NoError(t, err) - - assert.Contains(t, changelog, "No notable changes") - }) - - t.Run("uses previous tag when fromRef is empty", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: old feature") - createChangelogTag(t, dir, "v1.0.0") - createChangelogCommit(t, dir, "feat: new feature") - - changelog, err := Generate(dir, "", "HEAD") - require.NoError(t, err) - - assert.Contains(t, changelog, "new feature") - assert.NotContains(t, changelog, "old feature") - }) - - t.Run("includes breaking changes", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat!: breaking API change") - createChangelogCommit(t, dir, "feat: normal feature") - - changelog, err := Generate(dir, "", "HEAD") - require.NoError(t, err) - - assert.Contains(t, changelog, "### BREAKING CHANGES") - assert.Contains(t, changelog, "breaking API change") - }) - - t.Run("includes scope in output", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat(api): add endpoint") - - changelog, err := Generate(dir, "", "HEAD") - require.NoError(t, err) - - assert.Contains(t, changelog, "**api**:") - }) -} - -func TestGenerate_Bad(t *testing.T) { - t.Run("returns error for non-git directory", func(t *testing.T) { - dir := t.TempDir() - - _, err := Generate(dir, "", "HEAD") - assert.Error(t, err) - }) -} - -func TestGenerateWithConfig_Good(t *testing.T) { - t.Run("filters commits by include list", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: new feature") - createChangelogCommit(t, dir, "fix: bug fix") - createChangelogCommit(t, dir, "chore: update deps") - - cfg := &ChangelogConfig{ - Include: []string{"feat"}, - } - - changelog, err := GenerateWithConfig(dir, "", "HEAD", cfg) - require.NoError(t, err) - - assert.Contains(t, changelog, "new feature") - assert.NotContains(t, changelog, "bug fix") - assert.NotContains(t, changelog, 
"update deps") - }) - - t.Run("filters commits by exclude list", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: new feature") - createChangelogCommit(t, dir, "fix: bug fix") - createChangelogCommit(t, dir, "chore: update deps") - - cfg := &ChangelogConfig{ - Exclude: []string{"chore"}, - } - - changelog, err := GenerateWithConfig(dir, "", "HEAD", cfg) - require.NoError(t, err) - - assert.Contains(t, changelog, "new feature") - assert.Contains(t, changelog, "bug fix") - assert.NotContains(t, changelog, "update deps") - }) - - t.Run("combines include and exclude filters", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: new feature") - createChangelogCommit(t, dir, "fix: bug fix") - createChangelogCommit(t, dir, "perf: performance") - - cfg := &ChangelogConfig{ - Include: []string{"feat", "fix", "perf"}, - Exclude: []string{"perf"}, - } - - changelog, err := GenerateWithConfig(dir, "", "HEAD", cfg) - require.NoError(t, err) - - assert.Contains(t, changelog, "new feature") - assert.Contains(t, changelog, "bug fix") - assert.NotContains(t, changelog, "performance") - }) -} - -func TestGetCommits_Good(t *testing.T) { - t.Run("returns all commits when fromRef is empty", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: first") - createChangelogCommit(t, dir, "feat: second") - createChangelogCommit(t, dir, "feat: third") - - commits, err := getCommits(dir, "", "HEAD") - require.NoError(t, err) - - assert.Len(t, commits, 3) - }) - - t.Run("returns commits between refs", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: first") - createChangelogTag(t, dir, "v1.0.0") - createChangelogCommit(t, dir, "feat: second") - createChangelogCommit(t, dir, "feat: third") - - commits, err := getCommits(dir, "v1.0.0", "HEAD") - require.NoError(t, err) - - assert.Len(t, commits, 2) - }) - - t.Run("excludes merge 
commits", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: regular commit") - // Merge commits are excluded by --no-merges flag - // We can verify by checking the count matches expected - - commits, err := getCommits(dir, "", "HEAD") - require.NoError(t, err) - - assert.Len(t, commits, 1) - assert.Contains(t, commits[0], "regular commit") - }) - - t.Run("returns empty slice for no commits in range", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: only commit") - createChangelogTag(t, dir, "v1.0.0") - - commits, err := getCommits(dir, "v1.0.0", "HEAD") - require.NoError(t, err) - - assert.Empty(t, commits) - }) -} - -func TestGetCommits_Bad(t *testing.T) { - t.Run("returns error for invalid ref", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: commit") - - _, err := getCommits(dir, "nonexistent-tag", "HEAD") - assert.Error(t, err) - }) - - t.Run("returns error for non-git directory", func(t *testing.T) { - dir := t.TempDir() - - _, err := getCommits(dir, "", "HEAD") - assert.Error(t, err) - }) -} - -func TestGetPreviousTag_Good(t *testing.T) { - t.Run("returns previous tag", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: first") - createChangelogTag(t, dir, "v1.0.0") - createChangelogCommit(t, dir, "feat: second") - createChangelogTag(t, dir, "v1.1.0") - - tag, err := getPreviousTag(dir, "v1.1.0") - require.NoError(t, err) - assert.Equal(t, "v1.0.0", tag) - }) - - t.Run("returns tag before HEAD", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: first") - createChangelogTag(t, dir, "v1.0.0") - createChangelogCommit(t, dir, "feat: second") - - tag, err := getPreviousTag(dir, "HEAD") - require.NoError(t, err) - assert.Equal(t, "v1.0.0", tag) - }) -} - -func TestGetPreviousTag_Bad(t *testing.T) { - t.Run("returns error when no previous tag 
exists", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: first") - createChangelogTag(t, dir, "v1.0.0") - - // v1.0.0^ has no tag before it - _, err := getPreviousTag(dir, "v1.0.0") - assert.Error(t, err) - }) - - t.Run("returns error for invalid ref", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: commit") - - _, err := getPreviousTag(dir, "nonexistent") - assert.Error(t, err) - }) -} - -func TestFormatCommitLine_Good(t *testing.T) { - t.Run("formats commit without scope", func(t *testing.T) { - commit := ConventionalCommit{ - Type: "feat", - Description: "add feature", - Hash: "abc1234", - } - - result := formatCommitLine(commit) - assert.Equal(t, "- add feature (abc1234)\n", result) - }) - - t.Run("formats commit with scope", func(t *testing.T) { - commit := ConventionalCommit{ - Type: "fix", - Scope: "api", - Description: "fix bug", - Hash: "def5678", - } - - result := formatCommitLine(commit) - assert.Equal(t, "- **api**: fix bug (def5678)\n", result) - }) -} - -func TestFormatChangelog_Ugly(t *testing.T) { - t.Run("handles custom commit type not in order", func(t *testing.T) { - commits := []ConventionalCommit{ - {Type: "custom", Description: "custom type", Hash: "abc1234"}, - } - - result := formatChangelog(commits, "v1.0.0") - - assert.Contains(t, result, "### Custom") - assert.Contains(t, result, "custom type") - }) - - t.Run("handles multiple custom commit types", func(t *testing.T) { - commits := []ConventionalCommit{ - {Type: "alpha", Description: "alpha feature", Hash: "abc1234"}, - {Type: "beta", Description: "beta feature", Hash: "def5678"}, - } - - result := formatChangelog(commits, "v1.0.0") - - // Should be sorted alphabetically for custom types - assert.Contains(t, result, "### Alpha") - assert.Contains(t, result, "### Beta") - }) -} - -func TestGenerateWithConfig_Bad(t *testing.T) { - t.Run("returns error for non-git directory", func(t *testing.T) { - 
dir := t.TempDir() - cfg := &ChangelogConfig{ - Include: []string{"feat"}, - } - - _, err := GenerateWithConfig(dir, "", "HEAD", cfg) - assert.Error(t, err) - }) -} - -func TestGenerateWithConfig_EdgeCases(t *testing.T) { - t.Run("uses HEAD when toRef is empty", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: new feature") - - cfg := &ChangelogConfig{ - Include: []string{"feat"}, - } - - // Pass empty toRef - changelog, err := GenerateWithConfig(dir, "", "", cfg) - require.NoError(t, err) - - assert.Contains(t, changelog, "## HEAD") - }) - - t.Run("handles previous tag lookup failure gracefully", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: first") - - cfg := &ChangelogConfig{ - Include: []string{"feat"}, - } - - // No tags exist, should still work - changelog, err := GenerateWithConfig(dir, "", "HEAD", cfg) - require.NoError(t, err) - - assert.Contains(t, changelog, "first") - }) - - t.Run("uses explicit fromRef when provided", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: old feature") - createChangelogTag(t, dir, "v1.0.0") - createChangelogCommit(t, dir, "feat: new feature") - - cfg := &ChangelogConfig{ - Include: []string{"feat"}, - } - - // Use explicit fromRef - changelog, err := GenerateWithConfig(dir, "v1.0.0", "HEAD", cfg) - require.NoError(t, err) - - assert.Contains(t, changelog, "new feature") - assert.NotContains(t, changelog, "old feature") - }) - - t.Run("skips non-conventional commits", func(t *testing.T) { - dir := setupChangelogGitRepo(t) - createChangelogCommit(t, dir, "feat: conventional commit") - createChangelogCommit(t, dir, "Update README") - - cfg := &ChangelogConfig{ - Include: []string{"feat"}, - } - - changelog, err := GenerateWithConfig(dir, "", "HEAD", cfg) - require.NoError(t, err) - - assert.Contains(t, changelog, "conventional commit") - assert.NotContains(t, changelog, "Update README") - }) 
-} diff --git a/pkg/release/config.go b/pkg/release/config.go deleted file mode 100644 index ae3d15b..0000000 --- a/pkg/release/config.go +++ /dev/null @@ -1,300 +0,0 @@ -// Package release provides release automation with changelog generation and publishing. -package release - -import ( - "fmt" - "os" - "path/filepath" - - "gopkg.in/yaml.v3" -) - -// ConfigFileName is the name of the release configuration file. -const ConfigFileName = "release.yaml" - -// ConfigDir is the directory where release configuration is stored. -const ConfigDir = ".core" - -// Config holds the complete release configuration loaded from .core/release.yaml. -type Config struct { - // Version is the config file format version. - Version int `yaml:"version"` - // Project contains project metadata. - Project ProjectConfig `yaml:"project"` - // Build contains build settings for the release. - Build BuildConfig `yaml:"build"` - // Publishers defines where to publish the release. - Publishers []PublisherConfig `yaml:"publishers"` - // Changelog configures changelog generation. - Changelog ChangelogConfig `yaml:"changelog"` - // SDK configures SDK generation. - SDK *SDKConfig `yaml:"sdk,omitempty"` - - // Internal fields (not serialized) - projectDir string // Set by LoadConfig - version string // Set by CLI flag -} - -// ProjectConfig holds project metadata for releases. -type ProjectConfig struct { - // Name is the project name. - Name string `yaml:"name"` - // Repository is the GitHub repository in owner/repo format. - Repository string `yaml:"repository"` -} - -// BuildConfig holds build settings for releases. -type BuildConfig struct { - // Targets defines the build targets. - Targets []TargetConfig `yaml:"targets"` -} - -// TargetConfig defines a build target. -type TargetConfig struct { - // OS is the target operating system (e.g., "linux", "darwin", "windows"). - OS string `yaml:"os"` - // Arch is the target architecture (e.g., "amd64", "arm64"). 
- Arch string `yaml:"arch"` -} - -// PublisherConfig holds configuration for a publisher. -type PublisherConfig struct { - // Type is the publisher type (e.g., "github", "linuxkit", "docker"). - Type string `yaml:"type"` - // Prerelease marks the release as a prerelease. - Prerelease bool `yaml:"prerelease"` - // Draft creates the release as a draft. - Draft bool `yaml:"draft"` - - // LinuxKit-specific configuration - // Config is the path to the LinuxKit YAML configuration file. - Config string `yaml:"config,omitempty"` - // Formats are the output formats to build (iso, raw, qcow2, vmdk). - Formats []string `yaml:"formats,omitempty"` - // Platforms are the target platforms (linux/amd64, linux/arm64). - Platforms []string `yaml:"platforms,omitempty"` - - // Docker-specific configuration - // Registry is the container registry (default: ghcr.io). - Registry string `yaml:"registry,omitempty"` - // Image is the image name in owner/repo format. - Image string `yaml:"image,omitempty"` - // Dockerfile is the path to the Dockerfile (default: Dockerfile). - Dockerfile string `yaml:"dockerfile,omitempty"` - // Tags are the image tags to apply. - Tags []string `yaml:"tags,omitempty"` - // BuildArgs are additional Docker build arguments. - BuildArgs map[string]string `yaml:"build_args,omitempty"` - - // npm-specific configuration - // Package is the npm package name (e.g., "@host-uk/core"). - Package string `yaml:"package,omitempty"` - // Access is the npm access level: "public" or "restricted". - Access string `yaml:"access,omitempty"` - - // Homebrew-specific configuration - // Tap is the Homebrew tap repository (e.g., "host-uk/homebrew-tap"). - Tap string `yaml:"tap,omitempty"` - // Formula is the formula name (defaults to project name). - Formula string `yaml:"formula,omitempty"` - - // Scoop-specific configuration - // Bucket is the Scoop bucket repository (e.g., "host-uk/scoop-bucket"). 
- Bucket string `yaml:"bucket,omitempty"` - - // AUR-specific configuration - // Maintainer is the AUR package maintainer (e.g., "Name "). - Maintainer string `yaml:"maintainer,omitempty"` - - // Chocolatey-specific configuration - // Push determines whether to push to Chocolatey (false = generate only). - Push bool `yaml:"push,omitempty"` - - // Official repo configuration (for Homebrew, Scoop) - // When enabled, generates files for PR to official repos. - Official *OfficialConfig `yaml:"official,omitempty"` -} - -// OfficialConfig holds configuration for generating files for official repo PRs. -type OfficialConfig struct { - // Enabled determines whether to generate files for official repos. - Enabled bool `yaml:"enabled"` - // Output is the directory to write generated files. - Output string `yaml:"output,omitempty"` -} - -// SDKConfig holds SDK generation configuration. -type SDKConfig struct { - // Spec is the path to the OpenAPI spec file. - Spec string `yaml:"spec,omitempty"` - // Languages to generate. - Languages []string `yaml:"languages,omitempty"` - // Output directory (default: sdk/). - Output string `yaml:"output,omitempty"` - // Package naming. - Package SDKPackageConfig `yaml:"package,omitempty"` - // Diff configuration. - Diff SDKDiffConfig `yaml:"diff,omitempty"` - // Publish configuration. - Publish SDKPublishConfig `yaml:"publish,omitempty"` -} - -// SDKPackageConfig holds package naming configuration. -type SDKPackageConfig struct { - Name string `yaml:"name,omitempty"` - Version string `yaml:"version,omitempty"` -} - -// SDKDiffConfig holds diff configuration. -type SDKDiffConfig struct { - Enabled bool `yaml:"enabled,omitempty"` - FailOnBreaking bool `yaml:"fail_on_breaking,omitempty"` -} - -// SDKPublishConfig holds monorepo publish configuration. -type SDKPublishConfig struct { - Repo string `yaml:"repo,omitempty"` - Path string `yaml:"path,omitempty"` -} - -// ChangelogConfig holds changelog generation settings. 
-type ChangelogConfig struct { - // Include specifies commit types to include in the changelog. - Include []string `yaml:"include"` - // Exclude specifies commit types to exclude from the changelog. - Exclude []string `yaml:"exclude"` -} - -// LoadConfig loads release configuration from the .core/release.yaml file in the given directory. -// If the config file does not exist, it returns DefaultConfig(). -// Returns an error if the file exists but cannot be parsed. -func LoadConfig(dir string) (*Config, error) { - configPath := filepath.Join(dir, ConfigDir, ConfigFileName) - - data, err := os.ReadFile(configPath) - if err != nil { - if os.IsNotExist(err) { - cfg := DefaultConfig() - cfg.projectDir = dir - return cfg, nil - } - return nil, fmt.Errorf("release.LoadConfig: failed to read config file: %w", err) - } - - var cfg Config - if err := yaml.Unmarshal(data, &cfg); err != nil { - return nil, fmt.Errorf("release.LoadConfig: failed to parse config file: %w", err) - } - - // Apply defaults for any missing fields - applyDefaults(&cfg) - cfg.projectDir = dir - - return &cfg, nil -} - -// DefaultConfig returns sensible defaults for release configuration. -func DefaultConfig() *Config { - return &Config{ - Version: 1, - Project: ProjectConfig{ - Name: "", - Repository: "", - }, - Build: BuildConfig{ - Targets: []TargetConfig{ - {OS: "linux", Arch: "amd64"}, - {OS: "linux", Arch: "arm64"}, - {OS: "darwin", Arch: "amd64"}, - {OS: "darwin", Arch: "arm64"}, - {OS: "windows", Arch: "amd64"}, - }, - }, - Publishers: []PublisherConfig{ - { - Type: "github", - Prerelease: false, - Draft: false, - }, - }, - Changelog: ChangelogConfig{ - Include: []string{"feat", "fix", "perf", "refactor"}, - Exclude: []string{"chore", "docs", "style", "test", "ci"}, - }, - } -} - -// applyDefaults fills in default values for any empty fields in the config. 
-func applyDefaults(cfg *Config) { - defaults := DefaultConfig() - - if cfg.Version == 0 { - cfg.Version = defaults.Version - } - - if len(cfg.Build.Targets) == 0 { - cfg.Build.Targets = defaults.Build.Targets - } - - if len(cfg.Publishers) == 0 { - cfg.Publishers = defaults.Publishers - } - - if len(cfg.Changelog.Include) == 0 && len(cfg.Changelog.Exclude) == 0 { - cfg.Changelog.Include = defaults.Changelog.Include - cfg.Changelog.Exclude = defaults.Changelog.Exclude - } -} - -// SetProjectDir sets the project directory on the config. -func (c *Config) SetProjectDir(dir string) { - c.projectDir = dir -} - -// SetVersion sets the version override on the config. -func (c *Config) SetVersion(version string) { - c.version = version -} - -// ConfigPath returns the path to the release config file for a given directory. -func ConfigPath(dir string) string { - return filepath.Join(dir, ConfigDir, ConfigFileName) -} - -// ConfigExists checks if a release config file exists in the given directory. -func ConfigExists(dir string) bool { - _, err := os.Stat(ConfigPath(dir)) - return err == nil -} - -// GetRepository returns the repository from the config. -func (c *Config) GetRepository() string { - return c.Project.Repository -} - -// GetProjectName returns the project name from the config. -func (c *Config) GetProjectName() string { - return c.Project.Name -} - -// WriteConfig writes the config to the .core/release.yaml file. 
-func WriteConfig(cfg *Config, dir string) error { - configPath := ConfigPath(dir) - - // Ensure directory exists - configDir := filepath.Dir(configPath) - if err := os.MkdirAll(configDir, 0755); err != nil { - return fmt.Errorf("release.WriteConfig: failed to create directory: %w", err) - } - - data, err := yaml.Marshal(cfg) - if err != nil { - return fmt.Errorf("release.WriteConfig: failed to marshal config: %w", err) - } - - if err := os.WriteFile(configPath, data, 0644); err != nil { - return fmt.Errorf("release.WriteConfig: failed to write config file: %w", err) - } - - return nil -} diff --git a/pkg/release/config_test.go b/pkg/release/config_test.go deleted file mode 100644 index d214c18..0000000 --- a/pkg/release/config_test.go +++ /dev/null @@ -1,357 +0,0 @@ -package release - -import ( - "os" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// setupConfigTestDir creates a temp directory with optional .core/release.yaml content. 
-func setupConfigTestDir(t *testing.T, configContent string) string { - t.Helper() - dir := t.TempDir() - - if configContent != "" { - coreDir := filepath.Join(dir, ConfigDir) - err := os.MkdirAll(coreDir, 0755) - require.NoError(t, err) - - configPath := filepath.Join(coreDir, ConfigFileName) - err = os.WriteFile(configPath, []byte(configContent), 0644) - require.NoError(t, err) - } - - return dir -} - -func TestLoadConfig_Good(t *testing.T) { - t.Run("loads valid config", func(t *testing.T) { - content := ` -version: 1 -project: - name: myapp - repository: owner/repo -build: - targets: - - os: linux - arch: amd64 - - os: darwin - arch: arm64 -publishers: - - type: github - prerelease: true - draft: false -changelog: - include: - - feat - - fix - exclude: - - chore -` - dir := setupConfigTestDir(t, content) - - cfg, err := LoadConfig(dir) - require.NoError(t, err) - require.NotNil(t, cfg) - - assert.Equal(t, 1, cfg.Version) - assert.Equal(t, "myapp", cfg.Project.Name) - assert.Equal(t, "owner/repo", cfg.Project.Repository) - assert.Len(t, cfg.Build.Targets, 2) - assert.Equal(t, "linux", cfg.Build.Targets[0].OS) - assert.Equal(t, "amd64", cfg.Build.Targets[0].Arch) - assert.Equal(t, "darwin", cfg.Build.Targets[1].OS) - assert.Equal(t, "arm64", cfg.Build.Targets[1].Arch) - assert.Len(t, cfg.Publishers, 1) - assert.Equal(t, "github", cfg.Publishers[0].Type) - assert.True(t, cfg.Publishers[0].Prerelease) - assert.False(t, cfg.Publishers[0].Draft) - assert.Equal(t, []string{"feat", "fix"}, cfg.Changelog.Include) - assert.Equal(t, []string{"chore"}, cfg.Changelog.Exclude) - }) - - t.Run("returns defaults when config file missing", func(t *testing.T) { - dir := t.TempDir() - - cfg, err := LoadConfig(dir) - require.NoError(t, err) - require.NotNil(t, cfg) - - defaults := DefaultConfig() - assert.Equal(t, defaults.Version, cfg.Version) - assert.Equal(t, defaults.Build.Targets, cfg.Build.Targets) - assert.Equal(t, defaults.Publishers, cfg.Publishers) - assert.Equal(t, 
defaults.Changelog.Include, cfg.Changelog.Include) - assert.Equal(t, defaults.Changelog.Exclude, cfg.Changelog.Exclude) - }) - - t.Run("applies defaults for missing fields", func(t *testing.T) { - content := ` -version: 2 -project: - name: partial -` - dir := setupConfigTestDir(t, content) - - cfg, err := LoadConfig(dir) - require.NoError(t, err) - require.NotNil(t, cfg) - - // Explicit values preserved - assert.Equal(t, 2, cfg.Version) - assert.Equal(t, "partial", cfg.Project.Name) - - // Defaults applied - defaults := DefaultConfig() - assert.Equal(t, defaults.Build.Targets, cfg.Build.Targets) - assert.Equal(t, defaults.Publishers, cfg.Publishers) - }) - - t.Run("sets project directory on load", func(t *testing.T) { - dir := setupConfigTestDir(t, "version: 1") - - cfg, err := LoadConfig(dir) - require.NoError(t, err) - assert.Equal(t, dir, cfg.projectDir) - }) -} - -func TestLoadConfig_Bad(t *testing.T) { - t.Run("returns error for invalid YAML", func(t *testing.T) { - content := ` -version: 1 -project: - name: [invalid yaml -` - dir := setupConfigTestDir(t, content) - - cfg, err := LoadConfig(dir) - assert.Error(t, err) - assert.Nil(t, cfg) - assert.Contains(t, err.Error(), "failed to parse config file") - }) - - t.Run("returns error for unreadable file", func(t *testing.T) { - dir := t.TempDir() - coreDir := filepath.Join(dir, ConfigDir) - err := os.MkdirAll(coreDir, 0755) - require.NoError(t, err) - - // Create config as a directory instead of file - configPath := filepath.Join(coreDir, ConfigFileName) - err = os.Mkdir(configPath, 0755) - require.NoError(t, err) - - cfg, err := LoadConfig(dir) - assert.Error(t, err) - assert.Nil(t, cfg) - assert.Contains(t, err.Error(), "failed to read config file") - }) -} - -func TestDefaultConfig_Good(t *testing.T) { - t.Run("returns sensible defaults", func(t *testing.T) { - cfg := DefaultConfig() - - assert.Equal(t, 1, cfg.Version) - assert.Empty(t, cfg.Project.Name) - assert.Empty(t, cfg.Project.Repository) - - // 
Default targets - assert.Len(t, cfg.Build.Targets, 5) - hasLinuxAmd64 := false - hasDarwinArm64 := false - hasWindowsAmd64 := false - for _, target := range cfg.Build.Targets { - if target.OS == "linux" && target.Arch == "amd64" { - hasLinuxAmd64 = true - } - if target.OS == "darwin" && target.Arch == "arm64" { - hasDarwinArm64 = true - } - if target.OS == "windows" && target.Arch == "amd64" { - hasWindowsAmd64 = true - } - } - assert.True(t, hasLinuxAmd64) - assert.True(t, hasDarwinArm64) - assert.True(t, hasWindowsAmd64) - - // Default publisher - assert.Len(t, cfg.Publishers, 1) - assert.Equal(t, "github", cfg.Publishers[0].Type) - assert.False(t, cfg.Publishers[0].Prerelease) - assert.False(t, cfg.Publishers[0].Draft) - - // Default changelog settings - assert.Contains(t, cfg.Changelog.Include, "feat") - assert.Contains(t, cfg.Changelog.Include, "fix") - assert.Contains(t, cfg.Changelog.Exclude, "chore") - assert.Contains(t, cfg.Changelog.Exclude, "docs") - }) -} - -func TestConfigPath_Good(t *testing.T) { - t.Run("returns correct path", func(t *testing.T) { - path := ConfigPath("/project/root") - assert.Equal(t, "/project/root/.core/release.yaml", path) - }) -} - -func TestConfigExists_Good(t *testing.T) { - t.Run("returns true when config exists", func(t *testing.T) { - dir := setupConfigTestDir(t, "version: 1") - assert.True(t, ConfigExists(dir)) - }) - - t.Run("returns false when config missing", func(t *testing.T) { - dir := t.TempDir() - assert.False(t, ConfigExists(dir)) - }) - - t.Run("returns false when .core dir missing", func(t *testing.T) { - dir := t.TempDir() - assert.False(t, ConfigExists(dir)) - }) -} - -func TestWriteConfig_Good(t *testing.T) { - t.Run("writes config to file", func(t *testing.T) { - dir := t.TempDir() - - cfg := DefaultConfig() - cfg.Project.Name = "testapp" - cfg.Project.Repository = "owner/testapp" - - err := WriteConfig(cfg, dir) - require.NoError(t, err) - - // Verify file exists - assert.True(t, ConfigExists(dir)) - - // 
Reload and verify - loaded, err := LoadConfig(dir) - require.NoError(t, err) - assert.Equal(t, "testapp", loaded.Project.Name) - assert.Equal(t, "owner/testapp", loaded.Project.Repository) - }) - - t.Run("creates .core directory if missing", func(t *testing.T) { - dir := t.TempDir() - - cfg := DefaultConfig() - err := WriteConfig(cfg, dir) - require.NoError(t, err) - - // Check directory was created - coreDir := filepath.Join(dir, ConfigDir) - info, err := os.Stat(coreDir) - require.NoError(t, err) - assert.True(t, info.IsDir()) - }) -} - -func TestConfig_GetRepository_Good(t *testing.T) { - t.Run("returns repository", func(t *testing.T) { - cfg := &Config{ - Project: ProjectConfig{ - Repository: "owner/repo", - }, - } - assert.Equal(t, "owner/repo", cfg.GetRepository()) - }) - - t.Run("returns empty string when not set", func(t *testing.T) { - cfg := &Config{} - assert.Empty(t, cfg.GetRepository()) - }) -} - -func TestConfig_GetProjectName_Good(t *testing.T) { - t.Run("returns project name", func(t *testing.T) { - cfg := &Config{ - Project: ProjectConfig{ - Name: "myapp", - }, - } - assert.Equal(t, "myapp", cfg.GetProjectName()) - }) - - t.Run("returns empty string when not set", func(t *testing.T) { - cfg := &Config{} - assert.Empty(t, cfg.GetProjectName()) - }) -} - -func TestConfig_SetVersion_Good(t *testing.T) { - t.Run("sets version override", func(t *testing.T) { - cfg := &Config{} - cfg.SetVersion("v1.2.3") - assert.Equal(t, "v1.2.3", cfg.version) - }) -} - -func TestConfig_SetProjectDir_Good(t *testing.T) { - t.Run("sets project directory", func(t *testing.T) { - cfg := &Config{} - cfg.SetProjectDir("/path/to/project") - assert.Equal(t, "/path/to/project", cfg.projectDir) - }) -} - -func TestWriteConfig_Bad(t *testing.T) { - t.Run("returns error for unwritable directory", func(t *testing.T) { - dir := t.TempDir() - - // Create .core directory and make it unwritable - coreDir := filepath.Join(dir, ConfigDir) - err := os.MkdirAll(coreDir, 0755) - 
require.NoError(t, err) - - // Make directory read-only - err = os.Chmod(coreDir, 0555) - require.NoError(t, err) - defer func() { _ = os.Chmod(coreDir, 0755) }() - - cfg := DefaultConfig() - err = WriteConfig(cfg, dir) - assert.Error(t, err) - assert.Contains(t, err.Error(), "failed to write config file") - }) - - t.Run("returns error when directory creation fails", func(t *testing.T) { - // Use a path that doesn't exist and can't be created - cfg := DefaultConfig() - err := WriteConfig(cfg, "/nonexistent/path/that/cannot/be/created") - assert.Error(t, err) - }) -} - -func TestApplyDefaults_Good(t *testing.T) { - t.Run("applies version default when zero", func(t *testing.T) { - cfg := &Config{Version: 0} - applyDefaults(cfg) - assert.Equal(t, 1, cfg.Version) - }) - - t.Run("preserves existing version", func(t *testing.T) { - cfg := &Config{Version: 2} - applyDefaults(cfg) - assert.Equal(t, 2, cfg.Version) - }) - - t.Run("applies changelog defaults only when both empty", func(t *testing.T) { - cfg := &Config{ - Changelog: ChangelogConfig{ - Include: []string{"feat"}, - }, - } - applyDefaults(cfg) - // Should not apply defaults because Include is set - assert.Equal(t, []string{"feat"}, cfg.Changelog.Include) - assert.Empty(t, cfg.Changelog.Exclude) - }) -} diff --git a/pkg/release/publishers/aur.go b/pkg/release/publishers/aur.go deleted file mode 100644 index 3dc7016..0000000 --- a/pkg/release/publishers/aur.go +++ /dev/null @@ -1,297 +0,0 @@ -// Package publishers provides release publishing implementations. -package publishers - -import ( - "bytes" - "context" - "embed" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - "text/template" - - "github.com/host-uk/core/pkg/build" -) - -//go:embed templates/aur/*.tmpl -var aurTemplates embed.FS - -// AURConfig holds AUR-specific configuration. -type AURConfig struct { - // Package is the AUR package name. - Package string - // Maintainer is the package maintainer (e.g., "Name "). 
- Maintainer string - // Official config for generating files for official repo PRs. - Official *OfficialConfig -} - -// AURPublisher publishes releases to AUR. -type AURPublisher struct{} - -// NewAURPublisher creates a new AUR publisher. -func NewAURPublisher() *AURPublisher { - return &AURPublisher{} -} - -// Name returns the publisher's identifier. -func (p *AURPublisher) Name() string { - return "aur" -} - -// Publish publishes the release to AUR. -func (p *AURPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error { - cfg := p.parseConfig(pubCfg, relCfg) - - if cfg.Maintainer == "" { - return fmt.Errorf("aur.Publish: maintainer is required (set publish.aur.maintainer in config)") - } - - repo := "" - if relCfg != nil { - repo = relCfg.GetRepository() - } - if repo == "" { - detectedRepo, err := detectRepository(release.ProjectDir) - if err != nil { - return fmt.Errorf("aur.Publish: could not determine repository: %w", err) - } - repo = detectedRepo - } - - projectName := "" - if relCfg != nil { - projectName = relCfg.GetProjectName() - } - if projectName == "" { - parts := strings.Split(repo, "/") - projectName = parts[len(parts)-1] - } - - packageName := cfg.Package - if packageName == "" { - packageName = projectName - } - - version := strings.TrimPrefix(release.Version, "v") - checksums := buildChecksumMap(release.Artifacts) - - data := aurTemplateData{ - PackageName: packageName, - Description: fmt.Sprintf("%s CLI", projectName), - Repository: repo, - Version: version, - License: "MIT", - BinaryName: projectName, - Maintainer: cfg.Maintainer, - Checksums: checksums, - } - - if dryRun { - return p.dryRunPublish(data, cfg) - } - - return p.executePublish(ctx, release.ProjectDir, data, cfg) -} - -type aurTemplateData struct { - PackageName string - Description string - Repository string - Version string - License string - BinaryName string - Maintainer string - Checksums ChecksumMap -} - 
-func (p *AURPublisher) parseConfig(pubCfg PublisherConfig, relCfg ReleaseConfig) AURConfig { - cfg := AURConfig{} - - if ext, ok := pubCfg.Extended.(map[string]any); ok { - if pkg, ok := ext["package"].(string); ok && pkg != "" { - cfg.Package = pkg - } - if maintainer, ok := ext["maintainer"].(string); ok && maintainer != "" { - cfg.Maintainer = maintainer - } - if official, ok := ext["official"].(map[string]any); ok { - cfg.Official = &OfficialConfig{} - if enabled, ok := official["enabled"].(bool); ok { - cfg.Official.Enabled = enabled - } - if output, ok := official["output"].(string); ok { - cfg.Official.Output = output - } - } - } - - return cfg -} - -func (p *AURPublisher) dryRunPublish(data aurTemplateData, cfg AURConfig) error { - fmt.Println() - fmt.Println("=== DRY RUN: AUR Publish ===") - fmt.Println() - fmt.Printf("Package: %s-bin\n", data.PackageName) - fmt.Printf("Version: %s\n", data.Version) - fmt.Printf("Maintainer: %s\n", data.Maintainer) - fmt.Printf("Repository: %s\n", data.Repository) - fmt.Println() - - pkgbuild, err := p.renderTemplate("templates/aur/PKGBUILD.tmpl", data) - if err != nil { - return fmt.Errorf("aur.dryRunPublish: %w", err) - } - fmt.Println("Generated PKGBUILD:") - fmt.Println("---") - fmt.Println(pkgbuild) - fmt.Println("---") - fmt.Println() - - srcinfo, err := p.renderTemplate("templates/aur/.SRCINFO.tmpl", data) - if err != nil { - return fmt.Errorf("aur.dryRunPublish: %w", err) - } - fmt.Println("Generated .SRCINFO:") - fmt.Println("---") - fmt.Println(srcinfo) - fmt.Println("---") - fmt.Println() - - fmt.Printf("Would push to AUR: ssh://aur@aur.archlinux.org/%s-bin.git\n", data.PackageName) - fmt.Println() - fmt.Println("=== END DRY RUN ===") - - return nil -} - -func (p *AURPublisher) executePublish(ctx context.Context, projectDir string, data aurTemplateData, cfg AURConfig) error { - pkgbuild, err := p.renderTemplate("templates/aur/PKGBUILD.tmpl", data) - if err != nil { - return fmt.Errorf("aur.Publish: failed to 
render PKGBUILD: %w", err) - } - - srcinfo, err := p.renderTemplate("templates/aur/.SRCINFO.tmpl", data) - if err != nil { - return fmt.Errorf("aur.Publish: failed to render .SRCINFO: %w", err) - } - - // If official config is enabled, write to output directory - if cfg.Official != nil && cfg.Official.Enabled { - output := cfg.Official.Output - if output == "" { - output = filepath.Join(projectDir, "dist", "aur") - } else if !filepath.IsAbs(output) { - output = filepath.Join(projectDir, output) - } - - if err := os.MkdirAll(output, 0755); err != nil { - return fmt.Errorf("aur.Publish: failed to create output directory: %w", err) - } - - pkgbuildPath := filepath.Join(output, "PKGBUILD") - if err := os.WriteFile(pkgbuildPath, []byte(pkgbuild), 0644); err != nil { - return fmt.Errorf("aur.Publish: failed to write PKGBUILD: %w", err) - } - - srcinfoPath := filepath.Join(output, ".SRCINFO") - if err := os.WriteFile(srcinfoPath, []byte(srcinfo), 0644); err != nil { - return fmt.Errorf("aur.Publish: failed to write .SRCINFO: %w", err) - } - fmt.Printf("Wrote AUR files: %s\n", output) - } - - // Push to AUR if not in official-only mode - if cfg.Official == nil || !cfg.Official.Enabled { - if err := p.pushToAUR(ctx, data, pkgbuild, srcinfo); err != nil { - return err - } - } - - return nil -} - -func (p *AURPublisher) pushToAUR(ctx context.Context, data aurTemplateData, pkgbuild, srcinfo string) error { - aurURL := fmt.Sprintf("ssh://aur@aur.archlinux.org/%s-bin.git", data.PackageName) - - tmpDir, err := os.MkdirTemp("", "aur-package-*") - if err != nil { - return fmt.Errorf("aur.Publish: failed to create temp directory: %w", err) - } - defer os.RemoveAll(tmpDir) - - // Clone existing AUR repo (or initialize new one) - fmt.Printf("Cloning AUR package %s-bin...\n", data.PackageName) - cmd := exec.CommandContext(ctx, "git", "clone", aurURL, tmpDir) - if err := cmd.Run(); err != nil { - // If clone fails, init a new repo - cmd = exec.CommandContext(ctx, "git", "init", tmpDir) 
- if err := cmd.Run(); err != nil { - return fmt.Errorf("aur.Publish: failed to initialize repo: %w", err) - } - cmd = exec.CommandContext(ctx, "git", "-C", tmpDir, "remote", "add", "origin", aurURL) - if err := cmd.Run(); err != nil { - return fmt.Errorf("aur.Publish: failed to add remote: %w", err) - } - } - - // Write files - if err := os.WriteFile(filepath.Join(tmpDir, "PKGBUILD"), []byte(pkgbuild), 0644); err != nil { - return fmt.Errorf("aur.Publish: failed to write PKGBUILD: %w", err) - } - if err := os.WriteFile(filepath.Join(tmpDir, ".SRCINFO"), []byte(srcinfo), 0644); err != nil { - return fmt.Errorf("aur.Publish: failed to write .SRCINFO: %w", err) - } - - commitMsg := fmt.Sprintf("Update to %s", data.Version) - - cmd = exec.CommandContext(ctx, "git", "add", ".") - cmd.Dir = tmpDir - if err := cmd.Run(); err != nil { - return fmt.Errorf("aur.Publish: git add failed: %w", err) - } - - cmd = exec.CommandContext(ctx, "git", "commit", "-m", commitMsg) - cmd.Dir = tmpDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - return fmt.Errorf("aur.Publish: git commit failed: %w", err) - } - - cmd = exec.CommandContext(ctx, "git", "push", "origin", "master") - cmd.Dir = tmpDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - return fmt.Errorf("aur.Publish: git push failed: %w", err) - } - - fmt.Printf("Published to AUR: https://aur.archlinux.org/packages/%s-bin\n", data.PackageName) - return nil -} - -func (p *AURPublisher) renderTemplate(name string, data aurTemplateData) (string, error) { - content, err := aurTemplates.ReadFile(name) - if err != nil { - return "", fmt.Errorf("failed to read template %s: %w", name, err) - } - - tmpl, err := template.New(filepath.Base(name)).Parse(string(content)) - if err != nil { - return "", fmt.Errorf("failed to parse template %s: %w", name, err) - } - - var buf bytes.Buffer - if err := tmpl.Execute(&buf, data); err != nil { - return "", 
fmt.Errorf("failed to execute template %s: %w", name, err) - } - - return buf.String(), nil -} - -// Ensure build package is used -var _ = build.Artifact{} diff --git a/pkg/release/publishers/aur_test.go b/pkg/release/publishers/aur_test.go deleted file mode 100644 index cf0b329..0000000 --- a/pkg/release/publishers/aur_test.go +++ /dev/null @@ -1,223 +0,0 @@ -package publishers - -import ( - "bytes" - "os" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestAURPublisher_Name_Good(t *testing.T) { - t.Run("returns aur", func(t *testing.T) { - p := NewAURPublisher() - assert.Equal(t, "aur", p.Name()) - }) -} - -func TestAURPublisher_ParseConfig_Good(t *testing.T) { - p := NewAURPublisher() - - t.Run("uses defaults when no extended config", func(t *testing.T) { - pubCfg := PublisherConfig{Type: "aur"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Empty(t, cfg.Package) - assert.Empty(t, cfg.Maintainer) - assert.Nil(t, cfg.Official) - }) - - t.Run("parses package and maintainer from extended config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "aur", - Extended: map[string]any{ - "package": "mypackage", - "maintainer": "John Doe ", - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Equal(t, "mypackage", cfg.Package) - assert.Equal(t, "John Doe ", cfg.Maintainer) - }) - - t.Run("parses official config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "aur", - Extended: map[string]any{ - "official": map[string]any{ - "enabled": true, - "output": "dist/aur-files", - }, - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - require.NotNil(t, cfg.Official) - assert.True(t, cfg.Official.Enabled) - assert.Equal(t, "dist/aur-files", cfg.Official.Output) - }) - - t.Run("handles missing official fields", func(t *testing.T) { 
- pubCfg := PublisherConfig{ - Type: "aur", - Extended: map[string]any{ - "official": map[string]any{}, - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - require.NotNil(t, cfg.Official) - assert.False(t, cfg.Official.Enabled) - assert.Empty(t, cfg.Official.Output) - }) -} - -func TestAURPublisher_RenderTemplate_Good(t *testing.T) { - p := NewAURPublisher() - - t.Run("renders PKGBUILD template with data", func(t *testing.T) { - data := aurTemplateData{ - PackageName: "myapp", - Description: "My awesome CLI", - Repository: "owner/myapp", - Version: "1.2.3", - License: "MIT", - BinaryName: "myapp", - Maintainer: "John Doe ", - Checksums: ChecksumMap{ - LinuxAmd64: "abc123", - LinuxArm64: "def456", - }, - } - - result, err := p.renderTemplate("templates/aur/PKGBUILD.tmpl", data) - require.NoError(t, err) - - assert.Contains(t, result, "# Maintainer: John Doe ") - assert.Contains(t, result, "pkgname=myapp-bin") - assert.Contains(t, result, "pkgver=1.2.3") - assert.Contains(t, result, `pkgdesc="My awesome CLI"`) - assert.Contains(t, result, "url=\"https://github.com/owner/myapp\"") - assert.Contains(t, result, "license=('MIT')") - assert.Contains(t, result, "sha256sums_x86_64=('abc123')") - assert.Contains(t, result, "sha256sums_aarch64=('def456')") - }) - - t.Run("renders .SRCINFO template with data", func(t *testing.T) { - data := aurTemplateData{ - PackageName: "myapp", - Description: "My CLI", - Repository: "owner/myapp", - Version: "1.0.0", - License: "MIT", - BinaryName: "myapp", - Maintainer: "Test ", - Checksums: ChecksumMap{ - LinuxAmd64: "checksum1", - LinuxArm64: "checksum2", - }, - } - - result, err := p.renderTemplate("templates/aur/.SRCINFO.tmpl", data) - require.NoError(t, err) - - assert.Contains(t, result, "pkgbase = myapp-bin") - assert.Contains(t, result, "pkgdesc = My CLI") - assert.Contains(t, result, "pkgver = 1.0.0") - assert.Contains(t, result, "arch = x86_64") - assert.Contains(t, result, 
"arch = aarch64") - assert.Contains(t, result, "sha256sums_x86_64 = checksum1") - assert.Contains(t, result, "sha256sums_aarch64 = checksum2") - assert.Contains(t, result, "pkgname = myapp-bin") - }) -} - -func TestAURPublisher_RenderTemplate_Bad(t *testing.T) { - p := NewAURPublisher() - - t.Run("returns error for non-existent template", func(t *testing.T) { - data := aurTemplateData{} - _, err := p.renderTemplate("templates/aur/nonexistent.tmpl", data) - assert.Error(t, err) - assert.Contains(t, err.Error(), "failed to read template") - }) -} - -func TestAURPublisher_DryRunPublish_Good(t *testing.T) { - p := NewAURPublisher() - - t.Run("outputs expected dry run information", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - data := aurTemplateData{ - PackageName: "myapp", - Version: "1.0.0", - Maintainer: "John Doe ", - Repository: "owner/repo", - BinaryName: "myapp", - Checksums: ChecksumMap{}, - } - cfg := AURConfig{ - Maintainer: "John Doe ", - } - - err := p.dryRunPublish(data, cfg) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - - assert.Contains(t, output, "DRY RUN: AUR Publish") - assert.Contains(t, output, "Package: myapp-bin") - assert.Contains(t, output, "Version: 1.0.0") - assert.Contains(t, output, "Maintainer: John Doe ") - assert.Contains(t, output, "Repository: owner/repo") - assert.Contains(t, output, "Generated PKGBUILD:") - assert.Contains(t, output, "Generated .SRCINFO:") - assert.Contains(t, output, "Would push to AUR: ssh://aur@aur.archlinux.org/myapp-bin.git") - assert.Contains(t, output, "END DRY RUN") - }) -} - -func TestAURPublisher_Publish_Bad(t *testing.T) { - p := NewAURPublisher() - - t.Run("fails when maintainer not configured", func(t *testing.T) { - release := &Release{ - Version: "v1.0.0", - ProjectDir: "/project", - } - pubCfg := PublisherConfig{Type: "aur"} - relCfg := &mockReleaseConfig{repository: 
"owner/repo"} - - err := p.Publish(nil, release, pubCfg, relCfg, false) - assert.Error(t, err) - assert.Contains(t, err.Error(), "maintainer is required") - }) -} - -func TestAURConfig_Defaults_Good(t *testing.T) { - t.Run("has sensible defaults", func(t *testing.T) { - p := NewAURPublisher() - pubCfg := PublisherConfig{Type: "aur"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Empty(t, cfg.Package) - assert.Empty(t, cfg.Maintainer) - assert.Nil(t, cfg.Official) - }) -} diff --git a/pkg/release/publishers/chocolatey.go b/pkg/release/publishers/chocolatey.go deleted file mode 100644 index 060bed6..0000000 --- a/pkg/release/publishers/chocolatey.go +++ /dev/null @@ -1,277 +0,0 @@ -// Package publishers provides release publishing implementations. -package publishers - -import ( - "bytes" - "context" - "embed" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - "text/template" - - "github.com/host-uk/core/pkg/build" -) - -//go:embed templates/chocolatey/*.tmpl templates/chocolatey/tools/*.tmpl -var chocolateyTemplates embed.FS - -// ChocolateyConfig holds Chocolatey-specific configuration. -type ChocolateyConfig struct { - // Package is the Chocolatey package name. - Package string - // Push determines whether to push to Chocolatey (false = generate only). - Push bool - // Official config for generating files for official repo PRs. - Official *OfficialConfig -} - -// ChocolateyPublisher publishes releases to Chocolatey. -type ChocolateyPublisher struct{} - -// NewChocolateyPublisher creates a new Chocolatey publisher. -func NewChocolateyPublisher() *ChocolateyPublisher { - return &ChocolateyPublisher{} -} - -// Name returns the publisher's identifier. -func (p *ChocolateyPublisher) Name() string { - return "chocolatey" -} - -// Publish publishes the release to Chocolatey. 
-func (p *ChocolateyPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error { - cfg := p.parseConfig(pubCfg, relCfg) - - repo := "" - if relCfg != nil { - repo = relCfg.GetRepository() - } - if repo == "" { - detectedRepo, err := detectRepository(release.ProjectDir) - if err != nil { - return fmt.Errorf("chocolatey.Publish: could not determine repository: %w", err) - } - repo = detectedRepo - } - - projectName := "" - if relCfg != nil { - projectName = relCfg.GetProjectName() - } - if projectName == "" { - parts := strings.Split(repo, "/") - projectName = parts[len(parts)-1] - } - - packageName := cfg.Package - if packageName == "" { - packageName = projectName - } - - version := strings.TrimPrefix(release.Version, "v") - checksums := buildChecksumMap(release.Artifacts) - - // Extract authors from repository - authors := strings.Split(repo, "/")[0] - - data := chocolateyTemplateData{ - PackageName: packageName, - Title: fmt.Sprintf("%s CLI", strings.Title(projectName)), - Description: fmt.Sprintf("%s CLI", projectName), - Repository: repo, - Version: version, - License: "MIT", - BinaryName: projectName, - Authors: authors, - Tags: fmt.Sprintf("cli %s", projectName), - Checksums: checksums, - } - - if dryRun { - return p.dryRunPublish(data, cfg) - } - - return p.executePublish(ctx, release.ProjectDir, data, cfg) -} - -type chocolateyTemplateData struct { - PackageName string - Title string - Description string - Repository string - Version string - License string - BinaryName string - Authors string - Tags string - Checksums ChecksumMap -} - -func (p *ChocolateyPublisher) parseConfig(pubCfg PublisherConfig, relCfg ReleaseConfig) ChocolateyConfig { - cfg := ChocolateyConfig{ - Push: false, // Default to generate only - } - - if ext, ok := pubCfg.Extended.(map[string]any); ok { - if pkg, ok := ext["package"].(string); ok && pkg != "" { - cfg.Package = pkg - } - if push, ok := ext["push"].(bool); ok { - 
cfg.Push = push - } - if official, ok := ext["official"].(map[string]any); ok { - cfg.Official = &OfficialConfig{} - if enabled, ok := official["enabled"].(bool); ok { - cfg.Official.Enabled = enabled - } - if output, ok := official["output"].(string); ok { - cfg.Official.Output = output - } - } - } - - return cfg -} - -func (p *ChocolateyPublisher) dryRunPublish(data chocolateyTemplateData, cfg ChocolateyConfig) error { - fmt.Println() - fmt.Println("=== DRY RUN: Chocolatey Publish ===") - fmt.Println() - fmt.Printf("Package: %s\n", data.PackageName) - fmt.Printf("Version: %s\n", data.Version) - fmt.Printf("Push: %t\n", cfg.Push) - fmt.Printf("Repository: %s\n", data.Repository) - fmt.Println() - - nuspec, err := p.renderTemplate("templates/chocolatey/package.nuspec.tmpl", data) - if err != nil { - return fmt.Errorf("chocolatey.dryRunPublish: %w", err) - } - fmt.Println("Generated package.nuspec:") - fmt.Println("---") - fmt.Println(nuspec) - fmt.Println("---") - fmt.Println() - - install, err := p.renderTemplate("templates/chocolatey/tools/chocolateyinstall.ps1.tmpl", data) - if err != nil { - return fmt.Errorf("chocolatey.dryRunPublish: %w", err) - } - fmt.Println("Generated chocolateyinstall.ps1:") - fmt.Println("---") - fmt.Println(install) - fmt.Println("---") - fmt.Println() - - if cfg.Push { - fmt.Println("Would push to Chocolatey community repo") - } else { - fmt.Println("Would generate package files only (push=false)") - } - fmt.Println() - fmt.Println("=== END DRY RUN ===") - - return nil -} - -func (p *ChocolateyPublisher) executePublish(ctx context.Context, projectDir string, data chocolateyTemplateData, cfg ChocolateyConfig) error { - nuspec, err := p.renderTemplate("templates/chocolatey/package.nuspec.tmpl", data) - if err != nil { - return fmt.Errorf("chocolatey.Publish: failed to render nuspec: %w", err) - } - - install, err := p.renderTemplate("templates/chocolatey/tools/chocolateyinstall.ps1.tmpl", data) - if err != nil { - return 
fmt.Errorf("chocolatey.Publish: failed to render install script: %w", err) - } - - // Create package directory - output := filepath.Join(projectDir, "dist", "chocolatey") - if cfg.Official != nil && cfg.Official.Enabled && cfg.Official.Output != "" { - output = cfg.Official.Output - if !filepath.IsAbs(output) { - output = filepath.Join(projectDir, output) - } - } - - toolsDir := filepath.Join(output, "tools") - if err := os.MkdirAll(toolsDir, 0755); err != nil { - return fmt.Errorf("chocolatey.Publish: failed to create output directory: %w", err) - } - - // Write files - nuspecPath := filepath.Join(output, fmt.Sprintf("%s.nuspec", data.PackageName)) - if err := os.WriteFile(nuspecPath, []byte(nuspec), 0644); err != nil { - return fmt.Errorf("chocolatey.Publish: failed to write nuspec: %w", err) - } - - installPath := filepath.Join(toolsDir, "chocolateyinstall.ps1") - if err := os.WriteFile(installPath, []byte(install), 0644); err != nil { - return fmt.Errorf("chocolatey.Publish: failed to write install script: %w", err) - } - - fmt.Printf("Wrote Chocolatey package files: %s\n", output) - - // Push to Chocolatey if configured - if cfg.Push { - if err := p.pushToChocolatey(ctx, output, data); err != nil { - return err - } - } - - return nil -} - -func (p *ChocolateyPublisher) pushToChocolatey(ctx context.Context, packageDir string, data chocolateyTemplateData) error { - // Check for CHOCOLATEY_API_KEY - apiKey := os.Getenv("CHOCOLATEY_API_KEY") - if apiKey == "" { - return fmt.Errorf("chocolatey.Publish: CHOCOLATEY_API_KEY environment variable is required for push") - } - - // Pack the package - nupkgPath := filepath.Join(packageDir, fmt.Sprintf("%s.%s.nupkg", data.PackageName, data.Version)) - - cmd := exec.CommandContext(ctx, "choco", "pack", filepath.Join(packageDir, fmt.Sprintf("%s.nuspec", data.PackageName)), "-OutputDirectory", packageDir) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - return 
fmt.Errorf("chocolatey.Publish: choco pack failed: %w", err) - } - - // Push the package - cmd = exec.CommandContext(ctx, "choco", "push", nupkgPath, "--source", "https://push.chocolatey.org/", "--api-key", apiKey) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - return fmt.Errorf("chocolatey.Publish: choco push failed: %w", err) - } - - fmt.Printf("Published to Chocolatey: https://community.chocolatey.org/packages/%s\n", data.PackageName) - return nil -} - -func (p *ChocolateyPublisher) renderTemplate(name string, data chocolateyTemplateData) (string, error) { - content, err := chocolateyTemplates.ReadFile(name) - if err != nil { - return "", fmt.Errorf("failed to read template %s: %w", name, err) - } - - tmpl, err := template.New(filepath.Base(name)).Parse(string(content)) - if err != nil { - return "", fmt.Errorf("failed to parse template %s: %w", name, err) - } - - var buf bytes.Buffer - if err := tmpl.Execute(&buf, data); err != nil { - return "", fmt.Errorf("failed to execute template %s: %w", name, err) - } - - return buf.String(), nil -} - -// Ensure build package is used -var _ = build.Artifact{} diff --git a/pkg/release/publishers/chocolatey_test.go b/pkg/release/publishers/chocolatey_test.go deleted file mode 100644 index fe5ea63..0000000 --- a/pkg/release/publishers/chocolatey_test.go +++ /dev/null @@ -1,320 +0,0 @@ -package publishers - -import ( - "bytes" - "os" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestChocolateyPublisher_Name_Good(t *testing.T) { - t.Run("returns chocolatey", func(t *testing.T) { - p := NewChocolateyPublisher() - assert.Equal(t, "chocolatey", p.Name()) - }) -} - -func TestChocolateyPublisher_ParseConfig_Good(t *testing.T) { - p := NewChocolateyPublisher() - - t.Run("uses defaults when no extended config", func(t *testing.T) { - pubCfg := PublisherConfig{Type: "chocolatey"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} 
- cfg := p.parseConfig(pubCfg, relCfg) - - assert.Empty(t, cfg.Package) - assert.False(t, cfg.Push) - assert.Nil(t, cfg.Official) - }) - - t.Run("parses package and push from extended config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "chocolatey", - Extended: map[string]any{ - "package": "mypackage", - "push": true, - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Equal(t, "mypackage", cfg.Package) - assert.True(t, cfg.Push) - }) - - t.Run("parses official config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "chocolatey", - Extended: map[string]any{ - "official": map[string]any{ - "enabled": true, - "output": "dist/choco", - }, - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - require.NotNil(t, cfg.Official) - assert.True(t, cfg.Official.Enabled) - assert.Equal(t, "dist/choco", cfg.Official.Output) - }) - - t.Run("handles missing official fields", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "chocolatey", - Extended: map[string]any{ - "official": map[string]any{}, - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - require.NotNil(t, cfg.Official) - assert.False(t, cfg.Official.Enabled) - assert.Empty(t, cfg.Official.Output) - }) - - t.Run("handles nil extended config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "chocolatey", - Extended: nil, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Empty(t, cfg.Package) - assert.False(t, cfg.Push) - assert.Nil(t, cfg.Official) - }) - - t.Run("defaults push to false when not specified", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "chocolatey", - Extended: map[string]any{ - "package": "mypackage", - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - 
assert.False(t, cfg.Push) - }) -} - -func TestChocolateyPublisher_RenderTemplate_Good(t *testing.T) { - p := NewChocolateyPublisher() - - t.Run("renders nuspec template with data", func(t *testing.T) { - data := chocolateyTemplateData{ - PackageName: "myapp", - Title: "MyApp CLI", - Description: "My awesome CLI", - Repository: "owner/myapp", - Version: "1.2.3", - License: "MIT", - BinaryName: "myapp", - Authors: "owner", - Tags: "cli myapp", - Checksums: ChecksumMap{}, - } - - result, err := p.renderTemplate("templates/chocolatey/package.nuspec.tmpl", data) - require.NoError(t, err) - - assert.Contains(t, result, `myapp`) - assert.Contains(t, result, `1.2.3`) - assert.Contains(t, result, `MyApp CLI`) - assert.Contains(t, result, `owner`) - assert.Contains(t, result, `My awesome CLI`) - assert.Contains(t, result, `cli myapp`) - assert.Contains(t, result, "projectUrl>https://github.com/owner/myapp") - assert.Contains(t, result, "releaseNotes>https://github.com/owner/myapp/releases/tag/v1.2.3") - }) - - t.Run("renders install script template with data", func(t *testing.T) { - data := chocolateyTemplateData{ - PackageName: "myapp", - Repository: "owner/myapp", - Version: "1.2.3", - BinaryName: "myapp", - Checksums: ChecksumMap{ - WindowsAmd64: "abc123def456", - }, - } - - result, err := p.renderTemplate("templates/chocolatey/tools/chocolateyinstall.ps1.tmpl", data) - require.NoError(t, err) - - assert.Contains(t, result, "$ErrorActionPreference = 'Stop'") - assert.Contains(t, result, "https://github.com/owner/myapp/releases/download/v1.2.3/myapp-windows-amd64.zip") - assert.Contains(t, result, "packageName = 'myapp'") - assert.Contains(t, result, "checksum64 = 'abc123def456'") - assert.Contains(t, result, "checksumType64 = 'sha256'") - assert.Contains(t, result, "Install-ChocolateyZipPackage") - }) -} - -func TestChocolateyPublisher_RenderTemplate_Bad(t *testing.T) { - p := NewChocolateyPublisher() - - t.Run("returns error for non-existent template", func(t *testing.T) 
{ - data := chocolateyTemplateData{} - _, err := p.renderTemplate("templates/chocolatey/nonexistent.tmpl", data) - assert.Error(t, err) - assert.Contains(t, err.Error(), "failed to read template") - }) -} - -func TestChocolateyPublisher_DryRunPublish_Good(t *testing.T) { - p := NewChocolateyPublisher() - - t.Run("outputs expected dry run information", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - data := chocolateyTemplateData{ - PackageName: "myapp", - Version: "1.0.0", - Repository: "owner/repo", - BinaryName: "myapp", - Authors: "owner", - Tags: "cli myapp", - Checksums: ChecksumMap{}, - } - cfg := ChocolateyConfig{ - Push: false, - } - - err := p.dryRunPublish(data, cfg) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - - assert.Contains(t, output, "DRY RUN: Chocolatey Publish") - assert.Contains(t, output, "Package: myapp") - assert.Contains(t, output, "Version: 1.0.0") - assert.Contains(t, output, "Push: false") - assert.Contains(t, output, "Repository: owner/repo") - assert.Contains(t, output, "Generated package.nuspec:") - assert.Contains(t, output, "Generated chocolateyinstall.ps1:") - assert.Contains(t, output, "Would generate package files only (push=false)") - assert.Contains(t, output, "END DRY RUN") - }) - - t.Run("shows push message when push is enabled", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - data := chocolateyTemplateData{ - PackageName: "myapp", - Version: "1.0.0", - BinaryName: "myapp", - Authors: "owner", - Tags: "cli", - Checksums: ChecksumMap{}, - } - cfg := ChocolateyConfig{ - Push: true, - } - - err := p.dryRunPublish(data, cfg) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - assert.Contains(t, output, "Push: true") - assert.Contains(t, output, "Would push to Chocolatey 
community repo") - }) -} - -func TestChocolateyPublisher_ExecutePublish_Bad(t *testing.T) { - p := NewChocolateyPublisher() - - t.Run("fails when CHOCOLATEY_API_KEY not set for push", func(t *testing.T) { - // Ensure CHOCOLATEY_API_KEY is not set - oldKey := os.Getenv("CHOCOLATEY_API_KEY") - os.Unsetenv("CHOCOLATEY_API_KEY") - defer func() { - if oldKey != "" { - os.Setenv("CHOCOLATEY_API_KEY", oldKey) - } - }() - - // Create a temp directory for the test - tmpDir, err := os.MkdirTemp("", "choco-test-*") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - data := chocolateyTemplateData{ - PackageName: "testpkg", - Version: "1.0.0", - BinaryName: "testpkg", - Repository: "owner/repo", - Authors: "owner", - Tags: "cli", - Checksums: ChecksumMap{}, - } - - err = p.pushToChocolatey(nil, tmpDir, data) - assert.Error(t, err) - assert.Contains(t, err.Error(), "CHOCOLATEY_API_KEY environment variable is required") - }) -} - -func TestChocolateyConfig_Defaults_Good(t *testing.T) { - t.Run("has sensible defaults", func(t *testing.T) { - p := NewChocolateyPublisher() - pubCfg := PublisherConfig{Type: "chocolatey"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Empty(t, cfg.Package) - assert.False(t, cfg.Push) - assert.Nil(t, cfg.Official) - }) -} - -func TestChocolateyTemplateData_Good(t *testing.T) { - t.Run("struct has all expected fields", func(t *testing.T) { - data := chocolateyTemplateData{ - PackageName: "myapp", - Title: "MyApp CLI", - Description: "description", - Repository: "org/repo", - Version: "1.0.0", - License: "MIT", - BinaryName: "myapp", - Authors: "org", - Tags: "cli tool", - Checksums: ChecksumMap{ - WindowsAmd64: "hash1", - }, - } - - assert.Equal(t, "myapp", data.PackageName) - assert.Equal(t, "MyApp CLI", data.Title) - assert.Equal(t, "description", data.Description) - assert.Equal(t, "org/repo", data.Repository) - assert.Equal(t, "1.0.0", data.Version) - assert.Equal(t, "MIT", 
data.License) - assert.Equal(t, "myapp", data.BinaryName) - assert.Equal(t, "org", data.Authors) - assert.Equal(t, "cli tool", data.Tags) - assert.Equal(t, "hash1", data.Checksums.WindowsAmd64) - }) -} diff --git a/pkg/release/publishers/docker.go b/pkg/release/publishers/docker.go deleted file mode 100644 index 7d342ab..0000000 --- a/pkg/release/publishers/docker.go +++ /dev/null @@ -1,278 +0,0 @@ -// Package publishers provides release publishing implementations. -package publishers - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" -) - -// DockerConfig holds configuration for the Docker publisher. -type DockerConfig struct { - // Registry is the container registry (default: ghcr.io). - Registry string `yaml:"registry"` - // Image is the image name in owner/repo format. - Image string `yaml:"image"` - // Dockerfile is the path to the Dockerfile (default: Dockerfile). - Dockerfile string `yaml:"dockerfile"` - // Platforms are the target platforms (linux/amd64, linux/arm64). - Platforms []string `yaml:"platforms"` - // Tags are additional tags to apply (supports {{.Version}} template). - Tags []string `yaml:"tags"` - // BuildArgs are additional build arguments. - BuildArgs map[string]string `yaml:"build_args"` -} - -// DockerPublisher builds and publishes Docker images. -type DockerPublisher struct{} - -// NewDockerPublisher creates a new Docker publisher. -func NewDockerPublisher() *DockerPublisher { - return &DockerPublisher{} -} - -// Name returns the publisher's identifier. -func (p *DockerPublisher) Name() string { - return "docker" -} - -// Publish builds and pushes Docker images. 
-func (p *DockerPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error { - // Validate docker CLI is available - if err := validateDockerCli(); err != nil { - return err - } - - // Parse Docker-specific config from publisher config - dockerCfg := p.parseConfig(pubCfg, relCfg, release.ProjectDir) - - // Validate Dockerfile exists - if _, err := os.Stat(dockerCfg.Dockerfile); err != nil { - return fmt.Errorf("docker.Publish: Dockerfile not found: %s", dockerCfg.Dockerfile) - } - - if dryRun { - return p.dryRunPublish(release, dockerCfg) - } - - return p.executePublish(ctx, release, dockerCfg) -} - -// parseConfig extracts Docker-specific configuration. -func (p *DockerPublisher) parseConfig(pubCfg PublisherConfig, relCfg ReleaseConfig, projectDir string) DockerConfig { - cfg := DockerConfig{ - Registry: "ghcr.io", - Image: "", - Dockerfile: filepath.Join(projectDir, "Dockerfile"), - Platforms: []string{"linux/amd64", "linux/arm64"}, - Tags: []string{"latest", "{{.Version}}"}, - BuildArgs: make(map[string]string), - } - - // Try to get image from repository config - if relCfg != nil && relCfg.GetRepository() != "" { - cfg.Image = relCfg.GetRepository() - } - - // Override from extended config if present - if ext, ok := pubCfg.Extended.(map[string]any); ok { - if registry, ok := ext["registry"].(string); ok && registry != "" { - cfg.Registry = registry - } - if image, ok := ext["image"].(string); ok && image != "" { - cfg.Image = image - } - if dockerfile, ok := ext["dockerfile"].(string); ok && dockerfile != "" { - if filepath.IsAbs(dockerfile) { - cfg.Dockerfile = dockerfile - } else { - cfg.Dockerfile = filepath.Join(projectDir, dockerfile) - } - } - if platforms, ok := ext["platforms"].([]any); ok && len(platforms) > 0 { - cfg.Platforms = make([]string, 0, len(platforms)) - for _, plat := range platforms { - if s, ok := plat.(string); ok { - cfg.Platforms = append(cfg.Platforms, s) - } - } - } - if 
tags, ok := ext["tags"].([]any); ok && len(tags) > 0 { - cfg.Tags = make([]string, 0, len(tags)) - for _, tag := range tags { - if s, ok := tag.(string); ok { - cfg.Tags = append(cfg.Tags, s) - } - } - } - if buildArgs, ok := ext["build_args"].(map[string]any); ok { - for k, v := range buildArgs { - if s, ok := v.(string); ok { - cfg.BuildArgs[k] = s - } - } - } - } - - return cfg -} - -// dryRunPublish shows what would be done without actually building. -func (p *DockerPublisher) dryRunPublish(release *Release, cfg DockerConfig) error { - fmt.Println() - fmt.Println("=== DRY RUN: Docker Build & Push ===") - fmt.Println() - fmt.Printf("Version: %s\n", release.Version) - fmt.Printf("Registry: %s\n", cfg.Registry) - fmt.Printf("Image: %s\n", cfg.Image) - fmt.Printf("Dockerfile: %s\n", cfg.Dockerfile) - fmt.Printf("Platforms: %s\n", strings.Join(cfg.Platforms, ", ")) - fmt.Println() - - // Resolve tags - tags := p.resolveTags(cfg.Tags, release.Version) - fmt.Println("Tags to be applied:") - for _, tag := range tags { - fullTag := p.buildFullTag(cfg.Registry, cfg.Image, tag) - fmt.Printf(" - %s\n", fullTag) - } - fmt.Println() - - fmt.Println("Would execute command:") - args := p.buildBuildxArgs(cfg, tags, release.Version) - fmt.Printf(" docker %s\n", strings.Join(args, " ")) - - if len(cfg.BuildArgs) > 0 { - fmt.Println() - fmt.Println("Build arguments:") - for k, v := range cfg.BuildArgs { - fmt.Printf(" %s=%s\n", k, v) - } - } - - fmt.Println() - fmt.Println("=== END DRY RUN ===") - - return nil -} - -// executePublish builds and pushes Docker images. 
-func (p *DockerPublisher) executePublish(ctx context.Context, release *Release, cfg DockerConfig) error { - // Ensure buildx is available and builder is set up - if err := p.ensureBuildx(ctx); err != nil { - return err - } - - // Resolve tags - tags := p.resolveTags(cfg.Tags, release.Version) - - // Build the docker buildx command - args := p.buildBuildxArgs(cfg, tags, release.Version) - - cmd := exec.CommandContext(ctx, "docker", args...) - cmd.Dir = release.ProjectDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - fmt.Printf("Building and pushing Docker image: %s\n", cfg.Image) - if err := cmd.Run(); err != nil { - return fmt.Errorf("docker.Publish: buildx build failed: %w", err) - } - - return nil -} - -// resolveTags expands template variables in tags. -func (p *DockerPublisher) resolveTags(tags []string, version string) []string { - resolved := make([]string, 0, len(tags)) - for _, tag := range tags { - // Replace {{.Version}} with actual version - resolvedTag := strings.ReplaceAll(tag, "{{.Version}}", version) - // Also support simpler {{Version}} syntax - resolvedTag = strings.ReplaceAll(resolvedTag, "{{Version}}", version) - resolved = append(resolved, resolvedTag) - } - return resolved -} - -// buildFullTag builds the full image tag including registry. -func (p *DockerPublisher) buildFullTag(registry, image, tag string) string { - if registry != "" { - return fmt.Sprintf("%s/%s:%s", registry, image, tag) - } - return fmt.Sprintf("%s:%s", image, tag) -} - -// buildBuildxArgs builds the arguments for docker buildx build command. 
-func (p *DockerPublisher) buildBuildxArgs(cfg DockerConfig, tags []string, version string) []string { - args := []string{"buildx", "build"} - - // Multi-platform support - if len(cfg.Platforms) > 0 { - args = append(args, "--platform", strings.Join(cfg.Platforms, ",")) - } - - // Add all tags - for _, tag := range tags { - fullTag := p.buildFullTag(cfg.Registry, cfg.Image, tag) - args = append(args, "-t", fullTag) - } - - // Dockerfile path - dockerfilePath := cfg.Dockerfile - args = append(args, "-f", dockerfilePath) - - // Build arguments - for k, v := range cfg.BuildArgs { - // Expand version in build args - expandedValue := strings.ReplaceAll(v, "{{.Version}}", version) - expandedValue = strings.ReplaceAll(expandedValue, "{{Version}}", version) - args = append(args, "--build-arg", fmt.Sprintf("%s=%s", k, expandedValue)) - } - - // Always add VERSION build arg - args = append(args, "--build-arg", fmt.Sprintf("VERSION=%s", version)) - - // Push the image - args = append(args, "--push") - - // Build context (current directory) - args = append(args, ".") - - return args -} - -// ensureBuildx ensures docker buildx is available and has a builder. -func (p *DockerPublisher) ensureBuildx(ctx context.Context) error { - // Check if buildx is available - cmd := exec.CommandContext(ctx, "docker", "buildx", "version") - if err := cmd.Run(); err != nil { - return fmt.Errorf("docker: buildx is not available. 
Install it from https://docs.docker.com/buildx/working-with-buildx/") - } - - // Check if we have a builder, create one if not - cmd = exec.CommandContext(ctx, "docker", "buildx", "inspect", "--bootstrap") - if err := cmd.Run(); err != nil { - // Try to create a builder - cmd = exec.CommandContext(ctx, "docker", "buildx", "create", "--use", "--bootstrap") - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - return fmt.Errorf("docker: failed to create buildx builder: %w", err) - } - } - - return nil -} - -// validateDockerCli checks if the docker CLI is available. -func validateDockerCli() error { - cmd := exec.Command("docker", "--version") - if err := cmd.Run(); err != nil { - return fmt.Errorf("docker: docker CLI not found. Install it from https://docs.docker.com/get-docker/") - } - return nil -} diff --git a/pkg/release/publishers/docker_test.go b/pkg/release/publishers/docker_test.go deleted file mode 100644 index f333b07..0000000 --- a/pkg/release/publishers/docker_test.go +++ /dev/null @@ -1,797 +0,0 @@ -package publishers - -import ( - "bytes" - "os" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestDockerPublisher_Name_Good(t *testing.T) { - t.Run("returns docker", func(t *testing.T) { - p := NewDockerPublisher() - assert.Equal(t, "docker", p.Name()) - }) -} - -func TestDockerPublisher_ParseConfig_Good(t *testing.T) { - p := NewDockerPublisher() - - t.Run("uses defaults when no extended config", func(t *testing.T) { - pubCfg := PublisherConfig{Type: "docker"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg, "/project") - - assert.Equal(t, "ghcr.io", cfg.Registry) - assert.Equal(t, "owner/repo", cfg.Image) - assert.Equal(t, "/project/Dockerfile", cfg.Dockerfile) - assert.Equal(t, []string{"linux/amd64", "linux/arm64"}, cfg.Platforms) - assert.Equal(t, []string{"latest", "{{.Version}}"}, cfg.Tags) - }) - 
- t.Run("parses extended config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "docker", - Extended: map[string]any{ - "registry": "docker.io", - "image": "myorg/myimage", - "dockerfile": "docker/Dockerfile.prod", - "platforms": []any{"linux/amd64"}, - "tags": []any{"latest", "stable", "{{.Version}}"}, - "build_args": map[string]any{ - "GO_VERSION": "1.21", - }, - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg, "/project") - - assert.Equal(t, "docker.io", cfg.Registry) - assert.Equal(t, "myorg/myimage", cfg.Image) - assert.Equal(t, "/project/docker/Dockerfile.prod", cfg.Dockerfile) - assert.Equal(t, []string{"linux/amd64"}, cfg.Platforms) - assert.Equal(t, []string{"latest", "stable", "{{.Version}}"}, cfg.Tags) - assert.Equal(t, "1.21", cfg.BuildArgs["GO_VERSION"]) - }) - - t.Run("handles absolute dockerfile path", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "docker", - Extended: map[string]any{ - "dockerfile": "/absolute/path/Dockerfile", - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg, "/project") - - assert.Equal(t, "/absolute/path/Dockerfile", cfg.Dockerfile) - }) -} - -func TestDockerPublisher_ResolveTags_Good(t *testing.T) { - p := NewDockerPublisher() - - t.Run("resolves version template", func(t *testing.T) { - tags := p.resolveTags([]string{"latest", "{{.Version}}", "stable"}, "v1.2.3") - - assert.Equal(t, []string{"latest", "v1.2.3", "stable"}, tags) - }) - - t.Run("handles simple version syntax", func(t *testing.T) { - tags := p.resolveTags([]string{"{{Version}}"}, "v1.0.0") - - assert.Equal(t, []string{"v1.0.0"}, tags) - }) - - t.Run("handles no templates", func(t *testing.T) { - tags := p.resolveTags([]string{"latest", "stable"}, "v1.2.3") - - assert.Equal(t, []string{"latest", "stable"}, tags) - }) -} - -func TestDockerPublisher_BuildFullTag_Good(t *testing.T) { - p := NewDockerPublisher() - - tests := []struct { - 
name string - registry string - image string - tag string - expected string - }{ - { - name: "with registry", - registry: "ghcr.io", - image: "owner/repo", - tag: "v1.0.0", - expected: "ghcr.io/owner/repo:v1.0.0", - }, - { - name: "without registry", - registry: "", - image: "myimage", - tag: "latest", - expected: "myimage:latest", - }, - { - name: "docker hub", - registry: "docker.io", - image: "library/nginx", - tag: "alpine", - expected: "docker.io/library/nginx:alpine", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - tag := p.buildFullTag(tc.registry, tc.image, tc.tag) - assert.Equal(t, tc.expected, tag) - }) - } -} - -func TestDockerPublisher_BuildBuildxArgs_Good(t *testing.T) { - p := NewDockerPublisher() - - t.Run("builds basic args", func(t *testing.T) { - cfg := DockerConfig{ - Registry: "ghcr.io", - Image: "owner/repo", - Dockerfile: "/project/Dockerfile", - Platforms: []string{"linux/amd64", "linux/arm64"}, - BuildArgs: make(map[string]string), - } - tags := []string{"latest", "v1.0.0"} - - args := p.buildBuildxArgs(cfg, tags, "v1.0.0") - - assert.Contains(t, args, "buildx") - assert.Contains(t, args, "build") - assert.Contains(t, args, "--platform") - assert.Contains(t, args, "linux/amd64,linux/arm64") - assert.Contains(t, args, "-t") - assert.Contains(t, args, "ghcr.io/owner/repo:latest") - assert.Contains(t, args, "ghcr.io/owner/repo:v1.0.0") - assert.Contains(t, args, "-f") - assert.Contains(t, args, "/project/Dockerfile") - assert.Contains(t, args, "--push") - assert.Contains(t, args, ".") - }) - - t.Run("includes build args", func(t *testing.T) { - cfg := DockerConfig{ - Registry: "ghcr.io", - Image: "owner/repo", - Dockerfile: "/project/Dockerfile", - Platforms: []string{"linux/amd64"}, - BuildArgs: map[string]string{ - "GO_VERSION": "1.21", - "APP_NAME": "myapp", - }, - } - tags := []string{"latest"} - - args := p.buildBuildxArgs(cfg, tags, "v1.0.0") - - assert.Contains(t, args, "--build-arg") - // Check that build 
args are present (order may vary) - foundGoVersion := false - foundAppName := false - foundVersion := false - for i, arg := range args { - if arg == "--build-arg" && i+1 < len(args) { - if args[i+1] == "GO_VERSION=1.21" { - foundGoVersion = true - } - if args[i+1] == "APP_NAME=myapp" { - foundAppName = true - } - if args[i+1] == "VERSION=v1.0.0" { - foundVersion = true - } - } - } - assert.True(t, foundGoVersion, "GO_VERSION build arg not found") - assert.True(t, foundAppName, "APP_NAME build arg not found") - assert.True(t, foundVersion, "VERSION build arg not found") - }) - - t.Run("expands version in build args", func(t *testing.T) { - cfg := DockerConfig{ - Registry: "ghcr.io", - Image: "owner/repo", - Dockerfile: "/project/Dockerfile", - Platforms: []string{"linux/amd64"}, - BuildArgs: map[string]string{ - "APP_VERSION": "{{.Version}}", - }, - } - tags := []string{"latest"} - - args := p.buildBuildxArgs(cfg, tags, "v2.0.0") - - foundExpandedVersion := false - for i, arg := range args { - if arg == "--build-arg" && i+1 < len(args) { - if args[i+1] == "APP_VERSION=v2.0.0" { - foundExpandedVersion = true - } - } - } - assert.True(t, foundExpandedVersion, "APP_VERSION should be expanded to v2.0.0") - }) -} - -func TestDockerPublisher_Publish_Bad(t *testing.T) { - if testing.Short() { - t.Skip("skipping integration test in short mode") - } - - p := NewDockerPublisher() - - t.Run("fails when dockerfile not found", func(t *testing.T) { - release := &Release{ - Version: "v1.0.0", - ProjectDir: "/nonexistent", - } - pubCfg := PublisherConfig{ - Type: "docker", - Extended: map[string]any{ - "dockerfile": "/nonexistent/Dockerfile", - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err := p.Publish(nil, release, pubCfg, relCfg, false) - assert.Error(t, err) - assert.Contains(t, err.Error(), "Dockerfile not found") - }) -} - -func TestDockerConfig_Defaults_Good(t *testing.T) { - t.Run("has sensible defaults", func(t *testing.T) { - p := 
NewDockerPublisher() - pubCfg := PublisherConfig{Type: "docker"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - cfg := p.parseConfig(pubCfg, relCfg, "/project") - - // Verify defaults - assert.Equal(t, "ghcr.io", cfg.Registry) - assert.Equal(t, "owner/repo", cfg.Image) - assert.Len(t, cfg.Platforms, 2) - assert.Contains(t, cfg.Platforms, "linux/amd64") - assert.Contains(t, cfg.Platforms, "linux/arm64") - assert.Contains(t, cfg.Tags, "latest") - }) -} - -func TestDockerPublisher_DryRunPublish_Good(t *testing.T) { - p := NewDockerPublisher() - - t.Run("outputs expected dry run information", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v1.0.0", - ProjectDir: "/project", - } - cfg := DockerConfig{ - Registry: "ghcr.io", - Image: "owner/repo", - Dockerfile: "/project/Dockerfile", - Platforms: []string{"linux/amd64", "linux/arm64"}, - Tags: []string{"latest", "{{.Version}}"}, - BuildArgs: make(map[string]string), - } - - err := p.dryRunPublish(release, cfg) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - - assert.Contains(t, output, "DRY RUN: Docker Build & Push") - assert.Contains(t, output, "Version: v1.0.0") - assert.Contains(t, output, "Registry: ghcr.io") - assert.Contains(t, output, "Image: owner/repo") - assert.Contains(t, output, "Dockerfile: /project/Dockerfile") - assert.Contains(t, output, "Platforms: linux/amd64, linux/arm64") - assert.Contains(t, output, "Tags to be applied:") - assert.Contains(t, output, "ghcr.io/owner/repo:latest") - assert.Contains(t, output, "ghcr.io/owner/repo:v1.0.0") - assert.Contains(t, output, "Would execute command:") - assert.Contains(t, output, "docker buildx build") - assert.Contains(t, output, "END DRY RUN") - }) - - t.Run("shows build args when present", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - 
release := &Release{ - Version: "v1.0.0", - ProjectDir: "/project", - } - cfg := DockerConfig{ - Registry: "docker.io", - Image: "myorg/myapp", - Dockerfile: "/project/Dockerfile", - Platforms: []string{"linux/amd64"}, - Tags: []string{"latest"}, - BuildArgs: map[string]string{ - "GO_VERSION": "1.21", - "APP_NAME": "myapp", - }, - } - - err := p.dryRunPublish(release, cfg) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - - assert.Contains(t, output, "Build arguments:") - assert.Contains(t, output, "GO_VERSION=1.21") - assert.Contains(t, output, "APP_NAME=myapp") - }) - - t.Run("handles single platform", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v2.0.0", - ProjectDir: "/project", - } - cfg := DockerConfig{ - Registry: "ghcr.io", - Image: "owner/repo", - Dockerfile: "/project/Dockerfile.prod", - Platforms: []string{"linux/amd64"}, - Tags: []string{"stable"}, - BuildArgs: make(map[string]string), - } - - err := p.dryRunPublish(release, cfg) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - - assert.Contains(t, output, "Platforms: linux/amd64") - assert.Contains(t, output, "ghcr.io/owner/repo:stable") - }) -} - -func TestDockerPublisher_ParseConfig_EdgeCases_Good(t *testing.T) { - p := NewDockerPublisher() - - t.Run("handles nil release config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "docker", - Extended: map[string]any{ - "image": "custom/image", - }, - } - - cfg := p.parseConfig(pubCfg, nil, "/project") - - assert.Equal(t, "custom/image", cfg.Image) - assert.Equal(t, "ghcr.io", cfg.Registry) - }) - - t.Run("handles empty repository in release config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "docker", - Extended: map[string]any{ - "image": "fallback/image", - }, - } - 
relCfg := &mockReleaseConfig{repository: ""} - - cfg := p.parseConfig(pubCfg, relCfg, "/project") - - assert.Equal(t, "fallback/image", cfg.Image) - }) - - t.Run("extended config overrides repository image", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "docker", - Extended: map[string]any{ - "image": "override/image", - }, - } - relCfg := &mockReleaseConfig{repository: "original/repo"} - - cfg := p.parseConfig(pubCfg, relCfg, "/project") - - assert.Equal(t, "override/image", cfg.Image) - }) - - t.Run("handles mixed build args types", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "docker", - Extended: map[string]any{ - "build_args": map[string]any{ - "STRING_ARG": "value", - "INT_ARG": 123, // Non-string value should be skipped - }, - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - cfg := p.parseConfig(pubCfg, relCfg, "/project") - - assert.Equal(t, "value", cfg.BuildArgs["STRING_ARG"]) - _, exists := cfg.BuildArgs["INT_ARG"] - assert.False(t, exists, "non-string build arg should not be included") - }) -} - -func TestDockerPublisher_ResolveTags_EdgeCases_Good(t *testing.T) { - p := NewDockerPublisher() - - t.Run("handles empty tags", func(t *testing.T) { - tags := p.resolveTags([]string{}, "v1.0.0") - assert.Empty(t, tags) - }) - - t.Run("handles multiple version placeholders", func(t *testing.T) { - tags := p.resolveTags([]string{"{{.Version}}", "prefix-{{.Version}}", "{{.Version}}-suffix"}, "v1.2.3") - assert.Equal(t, []string{"v1.2.3", "prefix-v1.2.3", "v1.2.3-suffix"}, tags) - }) - - t.Run("handles mixed template formats", func(t *testing.T) { - tags := p.resolveTags([]string{"{{.Version}}", "{{Version}}", "latest"}, "v3.0.0") - assert.Equal(t, []string{"v3.0.0", "v3.0.0", "latest"}, tags) - }) -} - -func TestDockerPublisher_BuildBuildxArgs_EdgeCases_Good(t *testing.T) { - p := NewDockerPublisher() - - t.Run("handles empty platforms", func(t *testing.T) { - cfg := DockerConfig{ - Registry: "ghcr.io", - Image: 
"owner/repo", - Dockerfile: "/project/Dockerfile", - Platforms: []string{}, - BuildArgs: make(map[string]string), - } - - args := p.buildBuildxArgs(cfg, []string{"latest"}, "v1.0.0") - - assert.Contains(t, args, "buildx") - assert.Contains(t, args, "build") - // Should not have --platform if empty - foundPlatform := false - for i, arg := range args { - if arg == "--platform" { - foundPlatform = true - // Check the next arg exists (it shouldn't be empty) - if i+1 < len(args) && args[i+1] == "" { - t.Error("platform argument should not be empty string") - } - } - } - assert.False(t, foundPlatform, "should not include --platform when platforms is empty") - }) - - t.Run("handles version expansion in build args", func(t *testing.T) { - cfg := DockerConfig{ - Registry: "ghcr.io", - Image: "owner/repo", - Dockerfile: "/Dockerfile", - Platforms: []string{"linux/amd64"}, - BuildArgs: map[string]string{ - "VERSION": "{{.Version}}", - "SIMPLE_VER": "{{Version}}", - "STATIC_VALUE": "static", - }, - } - - args := p.buildBuildxArgs(cfg, []string{"latest"}, "v2.5.0") - - foundVersionArg := false - foundSimpleArg := false - foundStaticArg := false - foundAutoVersion := false - - for i, arg := range args { - if arg == "--build-arg" && i+1 < len(args) { - switch args[i+1] { - case "VERSION=v2.5.0": - foundVersionArg = true - case "SIMPLE_VER=v2.5.0": - foundSimpleArg = true - case "STATIC_VALUE=static": - foundStaticArg = true - } - // Auto-added VERSION build arg - if args[i+1] == "VERSION=v2.5.0" { - foundAutoVersion = true - } - } - } - - // Note: VERSION is both in BuildArgs and auto-added, so we just check it exists - assert.True(t, foundVersionArg || foundAutoVersion, "VERSION build arg not found") - assert.True(t, foundSimpleArg, "SIMPLE_VER build arg not expanded") - assert.True(t, foundStaticArg, "STATIC_VALUE build arg not found") - }) - - t.Run("handles empty registry", func(t *testing.T) { - cfg := DockerConfig{ - Registry: "", - Image: "localimage", - Dockerfile: 
"/Dockerfile", - Platforms: []string{"linux/amd64"}, - BuildArgs: make(map[string]string), - } - - args := p.buildBuildxArgs(cfg, []string{"latest"}, "v1.0.0") - - assert.Contains(t, args, "-t") - assert.Contains(t, args, "localimage:latest") - }) -} - -func TestDockerPublisher_Publish_DryRun_Good(t *testing.T) { - // Skip if docker CLI is not available - dry run still validates docker is installed - if err := validateDockerCli(); err != nil { - t.Skip("skipping test: docker CLI not available") - } - - p := NewDockerPublisher() - - t.Run("dry run succeeds with valid Dockerfile", func(t *testing.T) { - // Create temp directory with Dockerfile - tmpDir, err := os.MkdirTemp("", "docker-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - dockerfilePath := filepath.Join(tmpDir, "Dockerfile") - err = os.WriteFile(dockerfilePath, []byte("FROM alpine:latest\n"), 0644) - require.NoError(t, err) - - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v1.0.0", - ProjectDir: tmpDir, - } - pubCfg := PublisherConfig{Type: "docker"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err = p.Publish(nil, release, pubCfg, relCfg, true) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - assert.Contains(t, output, "DRY RUN: Docker Build & Push") - }) - - t.Run("dry run uses custom dockerfile path", func(t *testing.T) { - // Create temp directory with custom Dockerfile - tmpDir, err := os.MkdirTemp("", "docker-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - customDir := filepath.Join(tmpDir, "docker") - err = os.MkdirAll(customDir, 0755) - require.NoError(t, err) - - dockerfilePath := filepath.Join(customDir, "Dockerfile.prod") - err = os.WriteFile(dockerfilePath, []byte("FROM alpine:latest\n"), 0644) - require.NoError(t, err) - - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := 
&Release{ - Version: "v1.0.0", - ProjectDir: tmpDir, - } - pubCfg := PublisherConfig{ - Type: "docker", - Extended: map[string]any{ - "dockerfile": "docker/Dockerfile.prod", - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err = p.Publish(nil, release, pubCfg, relCfg, true) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - assert.Contains(t, output, "Dockerfile.prod") - }) -} - -func TestDockerPublisher_Publish_Validation_Bad(t *testing.T) { - p := NewDockerPublisher() - - t.Run("fails when Dockerfile not found with docker installed", func(t *testing.T) { - if err := validateDockerCli(); err != nil { - t.Skip("skipping test: docker CLI not available") - } - - release := &Release{ - Version: "v1.0.0", - ProjectDir: "/nonexistent/path", - } - pubCfg := PublisherConfig{Type: "docker"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err := p.Publish(nil, release, pubCfg, relCfg, false) - assert.Error(t, err) - assert.Contains(t, err.Error(), "Dockerfile not found") - }) - - t.Run("fails when docker CLI not available", func(t *testing.T) { - if err := validateDockerCli(); err == nil { - t.Skip("skipping test: docker CLI is available") - } - - release := &Release{ - Version: "v1.0.0", - ProjectDir: "/tmp", - } - pubCfg := PublisherConfig{Type: "docker"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err := p.Publish(nil, release, pubCfg, relCfg, false) - assert.Error(t, err) - assert.Contains(t, err.Error(), "docker CLI not found") - }) -} - -func TestValidateDockerCli_Good(t *testing.T) { - t.Run("returns nil when docker is installed", func(t *testing.T) { - err := validateDockerCli() - if err != nil { - // Docker is not installed, which is fine for this test - assert.Contains(t, err.Error(), "docker CLI not found") - } - // If err is nil, docker is installed - that's OK - }) -} - -func TestDockerPublisher_Publish_WithCLI_Good(t 
*testing.T) { - // These tests run only when docker CLI is available - if err := validateDockerCli(); err != nil { - t.Skip("skipping test: docker CLI not available") - } - - p := NewDockerPublisher() - - t.Run("dry run succeeds with all config options", func(t *testing.T) { - tmpDir, err := os.MkdirTemp("", "docker-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - dockerfilePath := filepath.Join(tmpDir, "Dockerfile") - err = os.WriteFile(dockerfilePath, []byte("FROM alpine:latest\n"), 0644) - require.NoError(t, err) - - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v1.0.0", - ProjectDir: tmpDir, - } - pubCfg := PublisherConfig{ - Type: "docker", - Extended: map[string]any{ - "registry": "docker.io", - "image": "myorg/myapp", - "platforms": []any{"linux/amd64", "linux/arm64"}, - "tags": []any{"latest", "{{.Version}}", "stable"}, - "build_args": map[string]any{"GO_VERSION": "1.21"}, - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err = p.Publish(nil, release, pubCfg, relCfg, true) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - assert.Contains(t, output, "DRY RUN: Docker Build & Push") - assert.Contains(t, output, "docker.io") - assert.Contains(t, output, "myorg/myapp") - }) - - t.Run("dry run with nil relCfg uses extended image", func(t *testing.T) { - tmpDir, err := os.MkdirTemp("", "docker-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - dockerfilePath := filepath.Join(tmpDir, "Dockerfile") - err = os.WriteFile(dockerfilePath, []byte("FROM alpine:latest\n"), 0644) - require.NoError(t, err) - - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v1.0.0", - ProjectDir: tmpDir, - } - pubCfg := PublisherConfig{ - Type: "docker", - Extended: map[string]any{ - "image": "standalone/image", - }, - } - - err = p.Publish(nil, 
release, pubCfg, nil, true) // nil relCfg - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - assert.Contains(t, output, "standalone/image") - }) - - t.Run("fails with non-existent Dockerfile in non-dry-run", func(t *testing.T) { - tmpDir, err := os.MkdirTemp("", "docker-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - // Don't create a Dockerfile - release := &Release{ - Version: "v1.0.0", - ProjectDir: tmpDir, - } - pubCfg := PublisherConfig{Type: "docker"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err = p.Publish(nil, release, pubCfg, relCfg, false) - assert.Error(t, err) - assert.Contains(t, err.Error(), "Dockerfile not found") - }) -} diff --git a/pkg/release/publishers/github.go b/pkg/release/publishers/github.go deleted file mode 100644 index b1eaf70..0000000 --- a/pkg/release/publishers/github.go +++ /dev/null @@ -1,233 +0,0 @@ -// Package publishers provides release publishing implementations. -package publishers - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" -) - -// GitHubPublisher publishes releases to GitHub using the gh CLI. -type GitHubPublisher struct{} - -// NewGitHubPublisher creates a new GitHub publisher. -func NewGitHubPublisher() *GitHubPublisher { - return &GitHubPublisher{} -} - -// Name returns the publisher's identifier. -func (p *GitHubPublisher) Name() string { - return "github" -} - -// Publish publishes the release to GitHub. -// Uses the gh CLI for creating releases and uploading assets. 
-func (p *GitHubPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error { - // Determine repository - repo := "" - if relCfg != nil { - repo = relCfg.GetRepository() - } - if repo == "" { - // Try to detect from git remote - detectedRepo, err := detectRepository(release.ProjectDir) - if err != nil { - return fmt.Errorf("github.Publish: could not determine repository: %w", err) - } - repo = detectedRepo - } - - if dryRun { - return p.dryRunPublish(release, pubCfg, repo) - } - - // Validate gh CLI is available and authenticated for actual publish - if err := validateGhCli(); err != nil { - return err - } - - return p.executePublish(ctx, release, pubCfg, repo) -} - -// dryRunPublish shows what would be done without actually publishing. -func (p *GitHubPublisher) dryRunPublish(release *Release, pubCfg PublisherConfig, repo string) error { - fmt.Println() - fmt.Println("=== DRY RUN: GitHub Release ===") - fmt.Println() - fmt.Printf("Repository: %s\n", repo) - fmt.Printf("Version: %s\n", release.Version) - fmt.Printf("Draft: %t\n", pubCfg.Draft) - fmt.Printf("Prerelease: %t\n", pubCfg.Prerelease) - fmt.Println() - - fmt.Println("Would create release with command:") - args := p.buildCreateArgs(release, pubCfg, repo) - fmt.Printf(" gh %s\n", strings.Join(args, " ")) - fmt.Println() - - if len(release.Artifacts) > 0 { - fmt.Println("Would upload artifacts:") - for _, artifact := range release.Artifacts { - fmt.Printf(" - %s\n", filepath.Base(artifact.Path)) - } - } - - fmt.Println() - fmt.Println("Changelog:") - fmt.Println("---") - fmt.Println(release.Changelog) - fmt.Println("---") - fmt.Println() - fmt.Println("=== END DRY RUN ===") - - return nil -} - -// executePublish actually creates the release and uploads artifacts. 
-func (p *GitHubPublisher) executePublish(ctx context.Context, release *Release, pubCfg PublisherConfig, repo string) error { - // Build the release create command - args := p.buildCreateArgs(release, pubCfg, repo) - - // Add artifact paths to the command - for _, artifact := range release.Artifacts { - args = append(args, artifact.Path) - } - - // Execute gh release create - cmd := exec.CommandContext(ctx, "gh", args...) - cmd.Dir = release.ProjectDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - if err := cmd.Run(); err != nil { - return fmt.Errorf("github.Publish: gh release create failed: %w", err) - } - - return nil -} - -// buildCreateArgs builds the arguments for gh release create. -func (p *GitHubPublisher) buildCreateArgs(release *Release, pubCfg PublisherConfig, repo string) []string { - args := []string{"release", "create", release.Version} - - // Add repository flag - if repo != "" { - args = append(args, "--repo", repo) - } - - // Add title - args = append(args, "--title", release.Version) - - // Add notes (changelog) - if release.Changelog != "" { - args = append(args, "--notes", release.Changelog) - } else { - args = append(args, "--generate-notes") - } - - // Add draft flag - if pubCfg.Draft { - args = append(args, "--draft") - } - - // Add prerelease flag - if pubCfg.Prerelease { - args = append(args, "--prerelease") - } - - return args -} - -// validateGhCli checks if the gh CLI is available and authenticated. -func validateGhCli() error { - // Check if gh is installed - cmd := exec.Command("gh", "--version") - if err := cmd.Run(); err != nil { - return fmt.Errorf("github: gh CLI not found. Install it from https://cli.github.com") - } - - // Check if authenticated - cmd = exec.Command("gh", "auth", "status") - output, err := cmd.CombinedOutput() - if err != nil { - return fmt.Errorf("github: not authenticated with gh CLI. 
Run 'gh auth login' first") - } - - if !strings.Contains(string(output), "Logged in") { - return fmt.Errorf("github: not authenticated with gh CLI. Run 'gh auth login' first") - } - - return nil -} - -// detectRepository detects the GitHub repository from git remote. -func detectRepository(dir string) (string, error) { - cmd := exec.Command("git", "remote", "get-url", "origin") - cmd.Dir = dir - output, err := cmd.Output() - if err != nil { - return "", fmt.Errorf("failed to get git remote: %w", err) - } - - url := strings.TrimSpace(string(output)) - return parseGitHubRepo(url) -} - -// parseGitHubRepo extracts owner/repo from a GitHub URL. -// Supports: -// - git@github.com:owner/repo.git -// - https://github.com/owner/repo.git -// - https://github.com/owner/repo -func parseGitHubRepo(url string) (string, error) { - // SSH format - if strings.HasPrefix(url, "git@github.com:") { - repo := strings.TrimPrefix(url, "git@github.com:") - repo = strings.TrimSuffix(repo, ".git") - return repo, nil - } - - // HTTPS format - if strings.HasPrefix(url, "https://github.com/") { - repo := strings.TrimPrefix(url, "https://github.com/") - repo = strings.TrimSuffix(repo, ".git") - return repo, nil - } - - return "", fmt.Errorf("not a GitHub URL: %s", url) -} - -// UploadArtifact uploads a single artifact to an existing release. -// This can be used to add artifacts to a release after creation. -func UploadArtifact(ctx context.Context, repo, version, artifactPath string) error { - cmd := exec.CommandContext(ctx, "gh", "release", "upload", version, artifactPath, "--repo", repo) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - if err := cmd.Run(); err != nil { - return fmt.Errorf("github.UploadArtifact: failed to upload %s: %w", artifactPath, err) - } - - return nil -} - -// DeleteRelease deletes a release by tag name. 
-func DeleteRelease(ctx context.Context, repo, version string) error { - cmd := exec.CommandContext(ctx, "gh", "release", "delete", version, "--repo", repo, "--yes") - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - if err := cmd.Run(); err != nil { - return fmt.Errorf("github.DeleteRelease: failed to delete %s: %w", version, err) - } - - return nil -} - -// ReleaseExists checks if a release exists for the given version. -func ReleaseExists(ctx context.Context, repo, version string) bool { - cmd := exec.CommandContext(ctx, "gh", "release", "view", version, "--repo", repo) - return cmd.Run() == nil -} diff --git a/pkg/release/publishers/github_test.go b/pkg/release/publishers/github_test.go deleted file mode 100644 index 6cc4e3e..0000000 --- a/pkg/release/publishers/github_test.go +++ /dev/null @@ -1,546 +0,0 @@ -package publishers - -import ( - "bytes" - "context" - "os" - "os/exec" - "strings" - "testing" - - "github.com/host-uk/core/pkg/build" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestParseGitHubRepo_Good(t *testing.T) { - tests := []struct { - name string - input string - expected string - }{ - { - name: "SSH URL", - input: "git@github.com:owner/repo.git", - expected: "owner/repo", - }, - { - name: "HTTPS URL with .git", - input: "https://github.com/owner/repo.git", - expected: "owner/repo", - }, - { - name: "HTTPS URL without .git", - input: "https://github.com/owner/repo", - expected: "owner/repo", - }, - { - name: "SSH URL without .git", - input: "git@github.com:owner/repo", - expected: "owner/repo", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - result, err := parseGitHubRepo(tc.input) - assert.NoError(t, err) - assert.Equal(t, tc.expected, result) - }) - } -} - -func TestParseGitHubRepo_Bad(t *testing.T) { - tests := []struct { - name string - input string - }{ - { - name: "GitLab URL", - input: "https://gitlab.com/owner/repo.git", - }, - { - name: "Bitbucket URL", - 
input: "git@bitbucket.org:owner/repo.git", - }, - { - name: "Random URL", - input: "https://example.com/something", - }, - { - name: "Not a URL", - input: "owner/repo", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - _, err := parseGitHubRepo(tc.input) - assert.Error(t, err) - }) - } -} - -func TestGitHubPublisher_Name_Good(t *testing.T) { - t.Run("returns github", func(t *testing.T) { - p := NewGitHubPublisher() - assert.Equal(t, "github", p.Name()) - }) -} - -func TestNewRelease_Good(t *testing.T) { - t.Run("creates release struct", func(t *testing.T) { - r := NewRelease("v1.0.0", nil, "changelog", "/project") - assert.Equal(t, "v1.0.0", r.Version) - assert.Equal(t, "changelog", r.Changelog) - assert.Equal(t, "/project", r.ProjectDir) - assert.Nil(t, r.Artifacts) - }) -} - -func TestNewPublisherConfig_Good(t *testing.T) { - t.Run("creates config struct", func(t *testing.T) { - cfg := NewPublisherConfig("github", true, false, nil) - assert.Equal(t, "github", cfg.Type) - assert.True(t, cfg.Prerelease) - assert.False(t, cfg.Draft) - assert.Nil(t, cfg.Extended) - }) - - t.Run("creates config with extended", func(t *testing.T) { - ext := map[string]any{"key": "value"} - cfg := NewPublisherConfig("docker", false, false, ext) - assert.Equal(t, "docker", cfg.Type) - assert.Equal(t, ext, cfg.Extended) - }) -} - -func TestBuildCreateArgs_Good(t *testing.T) { - p := NewGitHubPublisher() - - t.Run("basic args", func(t *testing.T) { - release := &Release{ - Version: "v1.0.0", - Changelog: "## v1.0.0\n\nChanges", - } - cfg := PublisherConfig{ - Type: "github", - } - - args := p.buildCreateArgs(release, cfg, "owner/repo") - - assert.Contains(t, args, "release") - assert.Contains(t, args, "create") - assert.Contains(t, args, "v1.0.0") - assert.Contains(t, args, "--repo") - assert.Contains(t, args, "owner/repo") - assert.Contains(t, args, "--title") - assert.Contains(t, args, "--notes") - }) - - t.Run("with draft flag", func(t *testing.T) { - 
release := &Release{ - Version: "v1.0.0", - } - cfg := PublisherConfig{ - Type: "github", - Draft: true, - } - - args := p.buildCreateArgs(release, cfg, "owner/repo") - - assert.Contains(t, args, "--draft") - }) - - t.Run("with prerelease flag", func(t *testing.T) { - release := &Release{ - Version: "v1.0.0", - } - cfg := PublisherConfig{ - Type: "github", - Prerelease: true, - } - - args := p.buildCreateArgs(release, cfg, "owner/repo") - - assert.Contains(t, args, "--prerelease") - }) - - t.Run("generates notes when no changelog", func(t *testing.T) { - release := &Release{ - Version: "v1.0.0", - Changelog: "", - } - cfg := PublisherConfig{ - Type: "github", - } - - args := p.buildCreateArgs(release, cfg, "owner/repo") - - assert.Contains(t, args, "--generate-notes") - }) - - t.Run("with draft and prerelease flags", func(t *testing.T) { - release := &Release{ - Version: "v1.0.0-alpha", - } - cfg := PublisherConfig{ - Type: "github", - Draft: true, - Prerelease: true, - } - - args := p.buildCreateArgs(release, cfg, "owner/repo") - - assert.Contains(t, args, "--draft") - assert.Contains(t, args, "--prerelease") - }) - - t.Run("without repo includes version", func(t *testing.T) { - release := &Release{ - Version: "v2.0.0", - Changelog: "Some changes", - } - cfg := PublisherConfig{ - Type: "github", - } - - args := p.buildCreateArgs(release, cfg, "") - - assert.Contains(t, args, "release") - assert.Contains(t, args, "create") - assert.Contains(t, args, "v2.0.0") - assert.NotContains(t, args, "--repo") - }) -} - -func TestGitHubPublisher_DryRunPublish_Good(t *testing.T) { - p := NewGitHubPublisher() - - t.Run("outputs expected dry run information", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v1.0.0", - Changelog: "## Changes\n\n- Feature A\n- Bug fix B", - ProjectDir: "/project", - } - cfg := PublisherConfig{ - Type: "github", - Draft: false, - Prerelease: false, - } - - err := 
p.dryRunPublish(release, cfg, "owner/repo") - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - - assert.Contains(t, output, "DRY RUN: GitHub Release") - assert.Contains(t, output, "Repository: owner/repo") - assert.Contains(t, output, "Version: v1.0.0") - assert.Contains(t, output, "Draft: false") - assert.Contains(t, output, "Prerelease: false") - assert.Contains(t, output, "Would create release with command:") - assert.Contains(t, output, "gh release create") - assert.Contains(t, output, "Changelog:") - assert.Contains(t, output, "## Changes") - assert.Contains(t, output, "END DRY RUN") - }) - - t.Run("shows artifacts when present", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v1.0.0", - Changelog: "Changes", - ProjectDir: "/project", - Artifacts: []build.Artifact{ - {Path: "/dist/myapp-darwin-amd64.tar.gz"}, - {Path: "/dist/myapp-linux-amd64.tar.gz"}, - }, - } - cfg := PublisherConfig{Type: "github"} - - err := p.dryRunPublish(release, cfg, "owner/repo") - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - - assert.Contains(t, output, "Would upload artifacts:") - assert.Contains(t, output, "myapp-darwin-amd64.tar.gz") - assert.Contains(t, output, "myapp-linux-amd64.tar.gz") - }) - - t.Run("shows draft and prerelease flags", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v1.0.0-beta", - Changelog: "Beta release", - ProjectDir: "/project", - } - cfg := PublisherConfig{ - Type: "github", - Draft: true, - Prerelease: true, - } - - err := p.dryRunPublish(release, cfg, "owner/repo") - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - - assert.Contains(t, 
output, "Draft: true") - assert.Contains(t, output, "Prerelease: true") - assert.Contains(t, output, "--draft") - assert.Contains(t, output, "--prerelease") - }) -} - -func TestGitHubPublisher_Publish_Good(t *testing.T) { - p := NewGitHubPublisher() - - t.Run("dry run uses repository from config", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v1.0.0", - Changelog: "Changes", - ProjectDir: "/tmp", - } - pubCfg := PublisherConfig{Type: "github"} - relCfg := &mockReleaseConfig{repository: "custom/repo"} - - // Dry run should succeed without needing gh CLI - err := p.Publish(nil, release, pubCfg, relCfg, true) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - assert.Contains(t, output, "Repository: custom/repo") - }) -} - -func TestGitHubPublisher_Publish_Bad(t *testing.T) { - if testing.Short() { - t.Skip("skipping integration test in short mode") - } - - p := NewGitHubPublisher() - - t.Run("fails when gh CLI not available and not dry run", func(t *testing.T) { - // This test will fail if gh is installed but not authenticated - // or succeed if gh is not installed - release := &Release{ - Version: "v1.0.0", - Changelog: "Changes", - ProjectDir: "/nonexistent", - } - pubCfg := PublisherConfig{Type: "github"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err := p.Publish(context.Background(), release, pubCfg, relCfg, false) - - // Should fail due to either gh not found or not authenticated - assert.Error(t, err) - }) - - t.Run("fails when repository cannot be detected", func(t *testing.T) { - // Create a temp directory that is NOT a git repo - tmpDir, err := os.MkdirTemp("", "github-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - release := &Release{ - Version: "v1.0.0", - Changelog: "Changes", - ProjectDir: tmpDir, - } - pubCfg := PublisherConfig{Type: "github"} - relCfg 
:= &mockReleaseConfig{repository: ""} // Empty repository - - err = p.Publish(context.Background(), release, pubCfg, relCfg, true) - - // Should fail because detectRepository will fail on non-git dir - assert.Error(t, err) - assert.Contains(t, err.Error(), "could not determine repository") - }) -} - -func TestDetectRepository_Good(t *testing.T) { - t.Run("detects repository from git remote", func(t *testing.T) { - // Create a temp git repo - tmpDir, err := os.MkdirTemp("", "git-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - // Initialize git repo and set remote - cmd := exec.Command("git", "init") - cmd.Dir = tmpDir - require.NoError(t, cmd.Run()) - - cmd = exec.Command("git", "remote", "add", "origin", "git@github.com:test-owner/test-repo.git") - cmd.Dir = tmpDir - require.NoError(t, cmd.Run()) - - repo, err := detectRepository(tmpDir) - require.NoError(t, err) - assert.Equal(t, "test-owner/test-repo", repo) - }) - - t.Run("detects repository from HTTPS remote", func(t *testing.T) { - tmpDir, err := os.MkdirTemp("", "git-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - cmd := exec.Command("git", "init") - cmd.Dir = tmpDir - require.NoError(t, cmd.Run()) - - cmd = exec.Command("git", "remote", "add", "origin", "https://github.com/another-owner/another-repo.git") - cmd.Dir = tmpDir - require.NoError(t, cmd.Run()) - - repo, err := detectRepository(tmpDir) - require.NoError(t, err) - assert.Equal(t, "another-owner/another-repo", repo) - }) -} - -func TestDetectRepository_Bad(t *testing.T) { - t.Run("fails when not a git repository", func(t *testing.T) { - tmpDir, err := os.MkdirTemp("", "no-git-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - _, err = detectRepository(tmpDir) - assert.Error(t, err) - assert.Contains(t, err.Error(), "failed to get git remote") - }) - - t.Run("fails when directory does not exist", func(t *testing.T) { - _, err := detectRepository("/nonexistent/directory/that/does/not/exist") - 
assert.Error(t, err) - }) - - t.Run("fails when remote is not GitHub", func(t *testing.T) { - tmpDir, err := os.MkdirTemp("", "git-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - cmd := exec.Command("git", "init") - cmd.Dir = tmpDir - require.NoError(t, cmd.Run()) - - cmd = exec.Command("git", "remote", "add", "origin", "git@gitlab.com:owner/repo.git") - cmd.Dir = tmpDir - require.NoError(t, cmd.Run()) - - _, err = detectRepository(tmpDir) - assert.Error(t, err) - assert.Contains(t, err.Error(), "not a GitHub URL") - }) -} - -func TestValidateGhCli_Bad(t *testing.T) { - // This test verifies the error messages from validateGhCli - // We can't easily mock exec.Command, but we can at least - // verify the function exists and returns expected error types - t.Run("returns error when gh not installed", func(t *testing.T) { - // We can't force gh to not be installed, but we can verify - // the function signature works correctly - err := validateGhCli() - if err != nil { - // Either gh is not installed or not authenticated - assert.True(t, - strings.Contains(err.Error(), "gh CLI not found") || - strings.Contains(err.Error(), "not authenticated"), - "unexpected error: %s", err.Error()) - } - // If err is nil, gh is installed and authenticated - that's OK too - }) -} - -func TestGitHubPublisher_ExecutePublish_Good(t *testing.T) { - // These tests run only when gh CLI is available and authenticated - if err := validateGhCli(); err != nil { - t.Skip("skipping test: gh CLI not available or not authenticated") - } - - p := NewGitHubPublisher() - - t.Run("executePublish builds command with artifacts", func(t *testing.T) { - // We test the command building by checking that it fails appropriately - // with a non-existent release (rather than testing actual release creation) - release := &Release{ - Version: "v999.999.999-test-nonexistent", - Changelog: "Test changelog", - ProjectDir: "/tmp", - Artifacts: []build.Artifact{ - {Path: 
"/tmp/nonexistent-artifact.tar.gz"}, - }, - } - cfg := PublisherConfig{ - Type: "github", - Draft: true, - Prerelease: true, - } - - // This will fail because the artifact doesn't exist, but it proves - // the code path runs - err := p.executePublish(context.Background(), release, cfg, "test-owner/test-repo-nonexistent") - assert.Error(t, err) // Expected to fail - }) -} - -func TestReleaseExists_Good(t *testing.T) { - // These tests run only when gh CLI is available - if err := validateGhCli(); err != nil { - t.Skip("skipping test: gh CLI not available or not authenticated") - } - - t.Run("returns false for non-existent release", func(t *testing.T) { - ctx := context.Background() - // Use a non-existent repo and version - exists := ReleaseExists(ctx, "nonexistent-owner-12345/nonexistent-repo-67890", "v999.999.999") - assert.False(t, exists) - }) - - t.Run("checks release existence", func(t *testing.T) { - ctx := context.Background() - // Test against a known public repository with releases - // This tests the true path if the release exists - exists := ReleaseExists(ctx, "cli/cli", "v2.0.0") - // We don't assert the result since it depends on network access - // and the release may or may not exist - _ = exists // Just verify function runs without panic - }) -} diff --git a/pkg/release/publishers/homebrew.go b/pkg/release/publishers/homebrew.go deleted file mode 100644 index 4d92261..0000000 --- a/pkg/release/publishers/homebrew.go +++ /dev/null @@ -1,355 +0,0 @@ -// Package publishers provides release publishing implementations. -package publishers - -import ( - "bytes" - "context" - "embed" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - "text/template" - - "github.com/host-uk/core/pkg/build" -) - -//go:embed templates/homebrew/*.tmpl -var homebrewTemplates embed.FS - -// HomebrewConfig holds Homebrew-specific configuration. -type HomebrewConfig struct { - // Tap is the Homebrew tap repository (e.g., "host-uk/homebrew-tap"). 
- Tap string - // Formula is the formula name (defaults to project name). - Formula string - // Official config for generating files for official repo PRs. - Official *OfficialConfig -} - -// OfficialConfig holds configuration for generating files for official repo PRs. -type OfficialConfig struct { - // Enabled determines whether to generate files for official repos. - Enabled bool - // Output is the directory to write generated files. - Output string -} - -// HomebrewPublisher publishes releases to Homebrew. -type HomebrewPublisher struct{} - -// NewHomebrewPublisher creates a new Homebrew publisher. -func NewHomebrewPublisher() *HomebrewPublisher { - return &HomebrewPublisher{} -} - -// Name returns the publisher's identifier. -func (p *HomebrewPublisher) Name() string { - return "homebrew" -} - -// Publish publishes the release to Homebrew. -func (p *HomebrewPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error { - // Parse config - cfg := p.parseConfig(pubCfg, relCfg) - - // Validate configuration - if cfg.Tap == "" && (cfg.Official == nil || !cfg.Official.Enabled) { - return fmt.Errorf("homebrew.Publish: tap is required (set publish.homebrew.tap in config)") - } - - // Get repository and project info - repo := "" - if relCfg != nil { - repo = relCfg.GetRepository() - } - if repo == "" { - detectedRepo, err := detectRepository(release.ProjectDir) - if err != nil { - return fmt.Errorf("homebrew.Publish: could not determine repository: %w", err) - } - repo = detectedRepo - } - - projectName := "" - if relCfg != nil { - projectName = relCfg.GetProjectName() - } - if projectName == "" { - parts := strings.Split(repo, "/") - projectName = parts[len(parts)-1] - } - - formulaName := cfg.Formula - if formulaName == "" { - formulaName = projectName - } - - // Strip leading 'v' from version - version := strings.TrimPrefix(release.Version, "v") - - // Build checksums map from artifacts - checksums := 
buildChecksumMap(release.Artifacts) - - // Template data - data := homebrewTemplateData{ - FormulaClass: toFormulaClass(formulaName), - Description: fmt.Sprintf("%s CLI", projectName), - Repository: repo, - Version: version, - License: "MIT", - BinaryName: projectName, - Checksums: checksums, - } - - if dryRun { - return p.dryRunPublish(data, cfg) - } - - return p.executePublish(ctx, release.ProjectDir, data, cfg) -} - -// homebrewTemplateData holds data for Homebrew templates. -type homebrewTemplateData struct { - FormulaClass string - Description string - Repository string - Version string - License string - BinaryName string - Checksums ChecksumMap -} - -// ChecksumMap holds checksums for different platform/arch combinations. -type ChecksumMap struct { - DarwinAmd64 string - DarwinArm64 string - LinuxAmd64 string - LinuxArm64 string - WindowsAmd64 string - WindowsArm64 string -} - -// parseConfig extracts Homebrew-specific configuration. -func (p *HomebrewPublisher) parseConfig(pubCfg PublisherConfig, relCfg ReleaseConfig) HomebrewConfig { - cfg := HomebrewConfig{ - Tap: "", - Formula: "", - } - - if ext, ok := pubCfg.Extended.(map[string]any); ok { - if tap, ok := ext["tap"].(string); ok && tap != "" { - cfg.Tap = tap - } - if formula, ok := ext["formula"].(string); ok && formula != "" { - cfg.Formula = formula - } - if official, ok := ext["official"].(map[string]any); ok { - cfg.Official = &OfficialConfig{} - if enabled, ok := official["enabled"].(bool); ok { - cfg.Official.Enabled = enabled - } - if output, ok := official["output"].(string); ok { - cfg.Official.Output = output - } - } - } - - return cfg -} - -// dryRunPublish shows what would be done. 
-func (p *HomebrewPublisher) dryRunPublish(data homebrewTemplateData, cfg HomebrewConfig) error { - fmt.Println() - fmt.Println("=== DRY RUN: Homebrew Publish ===") - fmt.Println() - fmt.Printf("Formula: %s\n", data.FormulaClass) - fmt.Printf("Version: %s\n", data.Version) - fmt.Printf("Tap: %s\n", cfg.Tap) - fmt.Printf("Repository: %s\n", data.Repository) - fmt.Println() - - // Generate and show formula - formula, err := p.renderTemplate("templates/homebrew/formula.rb.tmpl", data) - if err != nil { - return fmt.Errorf("homebrew.dryRunPublish: %w", err) - } - fmt.Println("Generated formula.rb:") - fmt.Println("---") - fmt.Println(formula) - fmt.Println("---") - fmt.Println() - - if cfg.Tap != "" { - fmt.Printf("Would commit to tap: %s\n", cfg.Tap) - } - if cfg.Official != nil && cfg.Official.Enabled { - output := cfg.Official.Output - if output == "" { - output = "dist/homebrew" - } - fmt.Printf("Would write files for official PR to: %s\n", output) - } - fmt.Println() - fmt.Println("=== END DRY RUN ===") - - return nil -} - -// executePublish creates the formula and commits to tap. 
-func (p *HomebrewPublisher) executePublish(ctx context.Context, projectDir string, data homebrewTemplateData, cfg HomebrewConfig) error { - // Generate formula - formula, err := p.renderTemplate("templates/homebrew/formula.rb.tmpl", data) - if err != nil { - return fmt.Errorf("homebrew.Publish: failed to render formula: %w", err) - } - - // If official config is enabled, write to output directory - if cfg.Official != nil && cfg.Official.Enabled { - output := cfg.Official.Output - if output == "" { - output = filepath.Join(projectDir, "dist", "homebrew") - } else if !filepath.IsAbs(output) { - output = filepath.Join(projectDir, output) - } - - if err := os.MkdirAll(output, 0755); err != nil { - return fmt.Errorf("homebrew.Publish: failed to create output directory: %w", err) - } - - formulaPath := filepath.Join(output, fmt.Sprintf("%s.rb", strings.ToLower(data.FormulaClass))) - if err := os.WriteFile(formulaPath, []byte(formula), 0644); err != nil { - return fmt.Errorf("homebrew.Publish: failed to write formula: %w", err) - } - fmt.Printf("Wrote Homebrew formula for official PR: %s\n", formulaPath) - } - - // If tap is configured, commit to it - if cfg.Tap != "" { - if err := p.commitToTap(ctx, cfg.Tap, data, formula); err != nil { - return err - } - } - - return nil -} - -// commitToTap commits the formula to the tap repository. 
-func (p *HomebrewPublisher) commitToTap(ctx context.Context, tap string, data homebrewTemplateData, formula string) error { - // Clone tap repo to temp directory - tmpDir, err := os.MkdirTemp("", "homebrew-tap-*") - if err != nil { - return fmt.Errorf("homebrew.Publish: failed to create temp directory: %w", err) - } - defer os.RemoveAll(tmpDir) - - // Clone the tap - fmt.Printf("Cloning tap %s...\n", tap) - cmd := exec.CommandContext(ctx, "gh", "repo", "clone", tap, tmpDir, "--", "--depth=1") - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - return fmt.Errorf("homebrew.Publish: failed to clone tap: %w", err) - } - - // Ensure Formula directory exists - formulaDir := filepath.Join(tmpDir, "Formula") - if err := os.MkdirAll(formulaDir, 0755); err != nil { - return fmt.Errorf("homebrew.Publish: failed to create Formula directory: %w", err) - } - - // Write formula - formulaPath := filepath.Join(formulaDir, fmt.Sprintf("%s.rb", strings.ToLower(data.FormulaClass))) - if err := os.WriteFile(formulaPath, []byte(formula), 0644); err != nil { - return fmt.Errorf("homebrew.Publish: failed to write formula: %w", err) - } - - // Git add, commit, push - commitMsg := fmt.Sprintf("Update %s to %s", data.FormulaClass, data.Version) - - cmd = exec.CommandContext(ctx, "git", "add", ".") - cmd.Dir = tmpDir - if err := cmd.Run(); err != nil { - return fmt.Errorf("homebrew.Publish: git add failed: %w", err) - } - - cmd = exec.CommandContext(ctx, "git", "commit", "-m", commitMsg) - cmd.Dir = tmpDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - return fmt.Errorf("homebrew.Publish: git commit failed: %w", err) - } - - cmd = exec.CommandContext(ctx, "git", "push") - cmd.Dir = tmpDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - return fmt.Errorf("homebrew.Publish: git push failed: %w", err) - } - - fmt.Printf("Updated Homebrew tap: %s\n", tap) - return nil -} - -// 
renderTemplate renders an embedded template with the given data. -func (p *HomebrewPublisher) renderTemplate(name string, data homebrewTemplateData) (string, error) { - content, err := homebrewTemplates.ReadFile(name) - if err != nil { - return "", fmt.Errorf("failed to read template %s: %w", name, err) - } - - tmpl, err := template.New(filepath.Base(name)).Parse(string(content)) - if err != nil { - return "", fmt.Errorf("failed to parse template %s: %w", name, err) - } - - var buf bytes.Buffer - if err := tmpl.Execute(&buf, data); err != nil { - return "", fmt.Errorf("failed to execute template %s: %w", name, err) - } - - return buf.String(), nil -} - -// toFormulaClass converts a package name to a Ruby class name. -func toFormulaClass(name string) string { - // Convert kebab-case to PascalCase - parts := strings.Split(name, "-") - for i, part := range parts { - if len(part) > 0 { - parts[i] = strings.ToUpper(part[:1]) + part[1:] - } - } - return strings.Join(parts, "") -} - -// buildChecksumMap extracts checksums from artifacts into a structured map. 
-func buildChecksumMap(artifacts []build.Artifact) ChecksumMap { - checksums := ChecksumMap{} - - for _, a := range artifacts { - // Parse artifact name to determine platform - name := filepath.Base(a.Path) - checksum := a.Checksum - - switch { - case strings.Contains(name, "darwin-amd64"): - checksums.DarwinAmd64 = checksum - case strings.Contains(name, "darwin-arm64"): - checksums.DarwinArm64 = checksum - case strings.Contains(name, "linux-amd64"): - checksums.LinuxAmd64 = checksum - case strings.Contains(name, "linux-arm64"): - checksums.LinuxArm64 = checksum - case strings.Contains(name, "windows-amd64"): - checksums.WindowsAmd64 = checksum - case strings.Contains(name, "windows-arm64"): - checksums.WindowsArm64 = checksum - } - } - - return checksums -} diff --git a/pkg/release/publishers/homebrew_test.go b/pkg/release/publishers/homebrew_test.go deleted file mode 100644 index e77011e..0000000 --- a/pkg/release/publishers/homebrew_test.go +++ /dev/null @@ -1,344 +0,0 @@ -package publishers - -import ( - "bytes" - "os" - "testing" - - "github.com/host-uk/core/pkg/build" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestHomebrewPublisher_Name_Good(t *testing.T) { - t.Run("returns homebrew", func(t *testing.T) { - p := NewHomebrewPublisher() - assert.Equal(t, "homebrew", p.Name()) - }) -} - -func TestHomebrewPublisher_ParseConfig_Good(t *testing.T) { - p := NewHomebrewPublisher() - - t.Run("uses defaults when no extended config", func(t *testing.T) { - pubCfg := PublisherConfig{Type: "homebrew"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Empty(t, cfg.Tap) - assert.Empty(t, cfg.Formula) - assert.Nil(t, cfg.Official) - }) - - t.Run("parses tap and formula from extended config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "homebrew", - Extended: map[string]any{ - "tap": "host-uk/homebrew-tap", - "formula": "myformula", - }, - } - relCfg := 
&mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Equal(t, "host-uk/homebrew-tap", cfg.Tap) - assert.Equal(t, "myformula", cfg.Formula) - }) - - t.Run("parses official config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "homebrew", - Extended: map[string]any{ - "official": map[string]any{ - "enabled": true, - "output": "dist/brew", - }, - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - require.NotNil(t, cfg.Official) - assert.True(t, cfg.Official.Enabled) - assert.Equal(t, "dist/brew", cfg.Official.Output) - }) - - t.Run("handles missing official fields", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "homebrew", - Extended: map[string]any{ - "official": map[string]any{}, - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - require.NotNil(t, cfg.Official) - assert.False(t, cfg.Official.Enabled) - assert.Empty(t, cfg.Official.Output) - }) -} - -func TestHomebrewPublisher_ToFormulaClass_Good(t *testing.T) { - tests := []struct { - name string - input string - expected string - }{ - { - name: "simple name", - input: "core", - expected: "Core", - }, - { - name: "kebab case", - input: "my-cli-tool", - expected: "MyCliTool", - }, - { - name: "already capitalised", - input: "CLI", - expected: "CLI", - }, - { - name: "single letter", - input: "x", - expected: "X", - }, - { - name: "multiple dashes", - input: "my-super-cool-app", - expected: "MySuperCoolApp", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - result := toFormulaClass(tc.input) - assert.Equal(t, tc.expected, result) - }) - } -} - -func TestHomebrewPublisher_BuildChecksumMap_Good(t *testing.T) { - t.Run("maps artifacts to checksums by platform", func(t *testing.T) { - artifacts := []build.Artifact{ - {Path: "/dist/myapp-darwin-amd64.tar.gz", OS: "darwin", Arch: "amd64", Checksum: "abc123"}, - 
{Path: "/dist/myapp-darwin-arm64.tar.gz", OS: "darwin", Arch: "arm64", Checksum: "def456"}, - {Path: "/dist/myapp-linux-amd64.tar.gz", OS: "linux", Arch: "amd64", Checksum: "ghi789"}, - {Path: "/dist/myapp-linux-arm64.tar.gz", OS: "linux", Arch: "arm64", Checksum: "jkl012"}, - {Path: "/dist/myapp-windows-amd64.zip", OS: "windows", Arch: "amd64", Checksum: "mno345"}, - {Path: "/dist/myapp-windows-arm64.zip", OS: "windows", Arch: "arm64", Checksum: "pqr678"}, - } - - checksums := buildChecksumMap(artifacts) - - assert.Equal(t, "abc123", checksums.DarwinAmd64) - assert.Equal(t, "def456", checksums.DarwinArm64) - assert.Equal(t, "ghi789", checksums.LinuxAmd64) - assert.Equal(t, "jkl012", checksums.LinuxArm64) - assert.Equal(t, "mno345", checksums.WindowsAmd64) - assert.Equal(t, "pqr678", checksums.WindowsArm64) - }) - - t.Run("handles empty artifacts", func(t *testing.T) { - checksums := buildChecksumMap([]build.Artifact{}) - - assert.Empty(t, checksums.DarwinAmd64) - assert.Empty(t, checksums.DarwinArm64) - assert.Empty(t, checksums.LinuxAmd64) - assert.Empty(t, checksums.LinuxArm64) - }) - - t.Run("handles partial platform coverage", func(t *testing.T) { - artifacts := []build.Artifact{ - {Path: "/dist/myapp-darwin-arm64.tar.gz", Checksum: "def456"}, - {Path: "/dist/myapp-linux-amd64.tar.gz", Checksum: "ghi789"}, - } - - checksums := buildChecksumMap(artifacts) - - assert.Empty(t, checksums.DarwinAmd64) - assert.Equal(t, "def456", checksums.DarwinArm64) - assert.Equal(t, "ghi789", checksums.LinuxAmd64) - assert.Empty(t, checksums.LinuxArm64) - }) -} - -func TestHomebrewPublisher_RenderTemplate_Good(t *testing.T) { - p := NewHomebrewPublisher() - - t.Run("renders formula template with data", func(t *testing.T) { - data := homebrewTemplateData{ - FormulaClass: "MyApp", - Description: "My awesome CLI", - Repository: "owner/myapp", - Version: "1.2.3", - License: "MIT", - BinaryName: "myapp", - Checksums: ChecksumMap{ - DarwinAmd64: "abc123", - DarwinArm64: "def456", - 
LinuxAmd64: "ghi789", - LinuxArm64: "jkl012", - }, - } - - result, err := p.renderTemplate("templates/homebrew/formula.rb.tmpl", data) - require.NoError(t, err) - - assert.Contains(t, result, "class MyApp < Formula") - assert.Contains(t, result, `desc "My awesome CLI"`) - assert.Contains(t, result, `version "1.2.3"`) - assert.Contains(t, result, `license "MIT"`) - assert.Contains(t, result, "owner/myapp") - assert.Contains(t, result, "abc123") - assert.Contains(t, result, "def456") - assert.Contains(t, result, "ghi789") - assert.Contains(t, result, "jkl012") - assert.Contains(t, result, `bin.install "myapp"`) - }) -} - -func TestHomebrewPublisher_RenderTemplate_Bad(t *testing.T) { - p := NewHomebrewPublisher() - - t.Run("returns error for non-existent template", func(t *testing.T) { - data := homebrewTemplateData{} - _, err := p.renderTemplate("templates/homebrew/nonexistent.tmpl", data) - assert.Error(t, err) - assert.Contains(t, err.Error(), "failed to read template") - }) -} - -func TestHomebrewPublisher_DryRunPublish_Good(t *testing.T) { - p := NewHomebrewPublisher() - - t.Run("outputs expected dry run information", func(t *testing.T) { - // Capture stdout - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - data := homebrewTemplateData{ - FormulaClass: "MyApp", - Description: "My CLI", - Repository: "owner/repo", - Version: "1.0.0", - License: "MIT", - BinaryName: "myapp", - Checksums: ChecksumMap{}, - } - cfg := HomebrewConfig{ - Tap: "owner/homebrew-tap", - } - - err := p.dryRunPublish(data, cfg) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - - assert.Contains(t, output, "DRY RUN: Homebrew Publish") - assert.Contains(t, output, "Formula: MyApp") - assert.Contains(t, output, "Version: 1.0.0") - assert.Contains(t, output, "Tap: owner/homebrew-tap") - assert.Contains(t, output, "Repository: owner/repo") - assert.Contains(t, output, "Would commit to tap: 
owner/homebrew-tap") - assert.Contains(t, output, "END DRY RUN") - }) - - t.Run("shows official output path when enabled", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - data := homebrewTemplateData{ - FormulaClass: "MyApp", - Version: "1.0.0", - BinaryName: "myapp", - Checksums: ChecksumMap{}, - } - cfg := HomebrewConfig{ - Official: &OfficialConfig{ - Enabled: true, - Output: "custom/path", - }, - } - - err := p.dryRunPublish(data, cfg) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - assert.Contains(t, output, "Would write files for official PR to: custom/path") - }) - - t.Run("uses default official output path when not specified", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - data := homebrewTemplateData{ - FormulaClass: "MyApp", - Version: "1.0.0", - BinaryName: "myapp", - Checksums: ChecksumMap{}, - } - cfg := HomebrewConfig{ - Official: &OfficialConfig{ - Enabled: true, - }, - } - - err := p.dryRunPublish(data, cfg) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - assert.Contains(t, output, "Would write files for official PR to: dist/homebrew") - }) -} - -func TestHomebrewPublisher_Publish_Bad(t *testing.T) { - p := NewHomebrewPublisher() - - t.Run("fails when tap not configured and not official mode", func(t *testing.T) { - release := &Release{ - Version: "v1.0.0", - ProjectDir: "/project", - } - pubCfg := PublisherConfig{Type: "homebrew"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err := p.Publish(nil, release, pubCfg, relCfg, false) - assert.Error(t, err) - assert.Contains(t, err.Error(), "tap is required") - }) -} - -func TestHomebrewConfig_Defaults_Good(t *testing.T) { - t.Run("has sensible defaults", func(t *testing.T) { - p := NewHomebrewPublisher() - pubCfg := 
PublisherConfig{Type: "homebrew"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Empty(t, cfg.Tap) - assert.Empty(t, cfg.Formula) - assert.Nil(t, cfg.Official) - }) -} diff --git a/pkg/release/publishers/linuxkit.go b/pkg/release/publishers/linuxkit.go deleted file mode 100644 index 2a5ca82..0000000 --- a/pkg/release/publishers/linuxkit.go +++ /dev/null @@ -1,300 +0,0 @@ -// Package publishers provides release publishing implementations. -package publishers - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" -) - -// LinuxKitConfig holds configuration for the LinuxKit publisher. -type LinuxKitConfig struct { - // Config is the path to the LinuxKit YAML configuration file. - Config string `yaml:"config"` - // Formats are the output formats to build. - // Supported: iso, iso-bios, iso-efi, raw, raw-bios, raw-efi, - // qcow2, qcow2-bios, qcow2-efi, vmdk, vhd, gcp, aws, - // docker (tarball for `docker load`), tar, kernel+initrd - Formats []string `yaml:"formats"` - // Platforms are the target platforms (linux/amd64, linux/arm64). - Platforms []string `yaml:"platforms"` -} - -// LinuxKitPublisher builds and publishes LinuxKit images. -type LinuxKitPublisher struct{} - -// NewLinuxKitPublisher creates a new LinuxKit publisher. -func NewLinuxKitPublisher() *LinuxKitPublisher { - return &LinuxKitPublisher{} -} - -// Name returns the publisher's identifier. -func (p *LinuxKitPublisher) Name() string { - return "linuxkit" -} - -// Publish builds LinuxKit images and uploads them to the GitHub release. 
-func (p *LinuxKitPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error { - // Validate linuxkit CLI is available - if err := validateLinuxKitCli(); err != nil { - return err - } - - // Parse LinuxKit-specific config from publisher config - lkCfg := p.parseConfig(pubCfg, release.ProjectDir) - - // Validate config file exists - if _, err := os.Stat(lkCfg.Config); err != nil { - return fmt.Errorf("linuxkit.Publish: config file not found: %s", lkCfg.Config) - } - - // Determine repository for artifact upload - repo := "" - if relCfg != nil { - repo = relCfg.GetRepository() - } - if repo == "" { - detectedRepo, err := detectRepository(release.ProjectDir) - if err != nil { - return fmt.Errorf("linuxkit.Publish: could not determine repository: %w", err) - } - repo = detectedRepo - } - - if dryRun { - return p.dryRunPublish(release, lkCfg, repo) - } - - return p.executePublish(ctx, release, lkCfg, repo) -} - -// parseConfig extracts LinuxKit-specific configuration. 
-func (p *LinuxKitPublisher) parseConfig(pubCfg PublisherConfig, projectDir string) LinuxKitConfig { - cfg := LinuxKitConfig{ - Config: filepath.Join(projectDir, ".core", "linuxkit", "server.yml"), - Formats: []string{"iso"}, - Platforms: []string{"linux/amd64"}, - } - - // Override from extended config if present - if ext, ok := pubCfg.Extended.(map[string]any); ok { - if configPath, ok := ext["config"].(string); ok && configPath != "" { - if filepath.IsAbs(configPath) { - cfg.Config = configPath - } else { - cfg.Config = filepath.Join(projectDir, configPath) - } - } - if formats, ok := ext["formats"].([]any); ok && len(formats) > 0 { - cfg.Formats = make([]string, 0, len(formats)) - for _, f := range formats { - if s, ok := f.(string); ok { - cfg.Formats = append(cfg.Formats, s) - } - } - } - if platforms, ok := ext["platforms"].([]any); ok && len(platforms) > 0 { - cfg.Platforms = make([]string, 0, len(platforms)) - for _, p := range platforms { - if s, ok := p.(string); ok { - cfg.Platforms = append(cfg.Platforms, s) - } - } - } - } - - return cfg -} - -// dryRunPublish shows what would be done without actually building. 
-func (p *LinuxKitPublisher) dryRunPublish(release *Release, cfg LinuxKitConfig, repo string) error { - fmt.Println() - fmt.Println("=== DRY RUN: LinuxKit Build & Publish ===") - fmt.Println() - fmt.Printf("Repository: %s\n", repo) - fmt.Printf("Version: %s\n", release.Version) - fmt.Printf("Config: %s\n", cfg.Config) - fmt.Printf("Formats: %s\n", strings.Join(cfg.Formats, ", ")) - fmt.Printf("Platforms: %s\n", strings.Join(cfg.Platforms, ", ")) - fmt.Println() - - outputDir := filepath.Join(release.ProjectDir, "dist", "linuxkit") - baseName := p.buildBaseName(release.Version) - - fmt.Println("Would execute commands:") - for _, platform := range cfg.Platforms { - parts := strings.Split(platform, "/") - arch := "amd64" - if len(parts) == 2 { - arch = parts[1] - } - - for _, format := range cfg.Formats { - outputName := fmt.Sprintf("%s-%s", baseName, arch) - args := p.buildLinuxKitArgs(cfg.Config, format, outputName, outputDir, arch) - fmt.Printf(" linuxkit %s\n", strings.Join(args, " ")) - } - } - fmt.Println() - - fmt.Println("Would upload artifacts to release:") - for _, platform := range cfg.Platforms { - parts := strings.Split(platform, "/") - arch := "amd64" - if len(parts) == 2 { - arch = parts[1] - } - - for _, format := range cfg.Formats { - outputName := fmt.Sprintf("%s-%s", baseName, arch) - artifactPath := p.getArtifactPath(outputDir, outputName, format) - fmt.Printf(" - %s\n", filepath.Base(artifactPath)) - if format == "docker" { - fmt.Printf(" Usage: docker load < %s\n", filepath.Base(artifactPath)) - } - } - } - - fmt.Println() - fmt.Println("=== END DRY RUN ===") - - return nil -} - -// executePublish builds LinuxKit images and uploads them. 
-func (p *LinuxKitPublisher) executePublish(ctx context.Context, release *Release, cfg LinuxKitConfig, repo string) error { - outputDir := filepath.Join(release.ProjectDir, "dist", "linuxkit") - - // Create output directory - if err := os.MkdirAll(outputDir, 0755); err != nil { - return fmt.Errorf("linuxkit.Publish: failed to create output directory: %w", err) - } - - baseName := p.buildBaseName(release.Version) - var artifacts []string - - // Build for each platform and format - for _, platform := range cfg.Platforms { - parts := strings.Split(platform, "/") - arch := "amd64" - if len(parts) == 2 { - arch = parts[1] - } - - for _, format := range cfg.Formats { - outputName := fmt.Sprintf("%s-%s", baseName, arch) - - // Build the image - args := p.buildLinuxKitArgs(cfg.Config, format, outputName, outputDir, arch) - cmd := exec.CommandContext(ctx, "linuxkit", args...) - cmd.Dir = release.ProjectDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - fmt.Printf("Building LinuxKit image: %s (%s)\n", outputName, format) - if err := cmd.Run(); err != nil { - return fmt.Errorf("linuxkit.Publish: build failed for %s/%s: %w", platform, format, err) - } - - // Track artifact for upload - artifactPath := p.getArtifactPath(outputDir, outputName, format) - artifacts = append(artifacts, artifactPath) - } - } - - // Upload artifacts to GitHub release - for _, artifactPath := range artifacts { - if _, err := os.Stat(artifactPath); err != nil { - return fmt.Errorf("linuxkit.Publish: artifact not found after build: %s", artifactPath) - } - - if err := UploadArtifact(ctx, repo, release.Version, artifactPath); err != nil { - return fmt.Errorf("linuxkit.Publish: failed to upload %s: %w", filepath.Base(artifactPath), err) - } - - // Print helpful usage info for docker format - if strings.HasSuffix(artifactPath, ".docker.tar") { - fmt.Printf(" Load with: docker load < %s\n", filepath.Base(artifactPath)) - } - } - - return nil -} - -// buildBaseName creates the base name for output 
files. -func (p *LinuxKitPublisher) buildBaseName(version string) string { - // Strip leading 'v' if present for cleaner filenames - name := strings.TrimPrefix(version, "v") - return fmt.Sprintf("linuxkit-%s", name) -} - -// buildLinuxKitArgs builds the arguments for linuxkit build command. -func (p *LinuxKitPublisher) buildLinuxKitArgs(configPath, format, outputName, outputDir, arch string) []string { - args := []string{"build"} - - // Output format - args = append(args, "--format", format) - - // Output name - args = append(args, "--name", outputName) - - // Output directory - args = append(args, "--dir", outputDir) - - // Architecture (if not amd64) - if arch != "amd64" { - args = append(args, "--arch", arch) - } - - // Config file - args = append(args, configPath) - - return args -} - -// getArtifactPath returns the expected path of the built artifact. -func (p *LinuxKitPublisher) getArtifactPath(outputDir, outputName, format string) string { - ext := p.getFormatExtension(format) - return filepath.Join(outputDir, outputName+ext) -} - -// getFormatExtension returns the file extension for a LinuxKit output format. -func (p *LinuxKitPublisher) getFormatExtension(format string) string { - switch format { - case "iso", "iso-bios", "iso-efi": - return ".iso" - case "raw", "raw-bios", "raw-efi": - return ".raw" - case "qcow2", "qcow2-bios", "qcow2-efi": - return ".qcow2" - case "vmdk": - return ".vmdk" - case "vhd": - return ".vhd" - case "gcp": - return ".img.tar.gz" - case "aws": - return ".raw" - case "docker": - // Docker format outputs a tarball that can be loaded with `docker load` - return ".docker.tar" - case "tar": - return ".tar" - case "kernel+initrd": - return "-initrd.img" - default: - return "." + format - } -} - -// validateLinuxKitCli checks if the linuxkit CLI is available. -func validateLinuxKitCli() error { - cmd := exec.Command("linuxkit", "version") - if err := cmd.Run(); err != nil { - return fmt.Errorf("linuxkit: linuxkit CLI not found. 
Install it from https://github.com/linuxkit/linuxkit") - } - return nil -} diff --git a/pkg/release/publishers/linuxkit_test.go b/pkg/release/publishers/linuxkit_test.go deleted file mode 100644 index 074ec92..0000000 --- a/pkg/release/publishers/linuxkit_test.go +++ /dev/null @@ -1,923 +0,0 @@ -package publishers - -import ( - "bytes" - "os" - "os/exec" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestLinuxKitPublisher_Name_Good(t *testing.T) { - t.Run("returns linuxkit", func(t *testing.T) { - p := NewLinuxKitPublisher() - assert.Equal(t, "linuxkit", p.Name()) - }) -} - -func TestLinuxKitPublisher_ParseConfig_Good(t *testing.T) { - p := NewLinuxKitPublisher() - - t.Run("uses defaults when no extended config", func(t *testing.T) { - pubCfg := PublisherConfig{Type: "linuxkit"} - cfg := p.parseConfig(pubCfg, "/project") - - assert.Equal(t, "/project/.core/linuxkit/server.yml", cfg.Config) - assert.Equal(t, []string{"iso"}, cfg.Formats) - assert.Equal(t, []string{"linux/amd64"}, cfg.Platforms) - }) - - t.Run("parses extended config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "linuxkit", - Extended: map[string]any{ - "config": ".core/linuxkit/custom.yml", - "formats": []any{"iso", "qcow2", "vmdk"}, - "platforms": []any{"linux/amd64", "linux/arm64"}, - }, - } - cfg := p.parseConfig(pubCfg, "/project") - - assert.Equal(t, "/project/.core/linuxkit/custom.yml", cfg.Config) - assert.Equal(t, []string{"iso", "qcow2", "vmdk"}, cfg.Formats) - assert.Equal(t, []string{"linux/amd64", "linux/arm64"}, cfg.Platforms) - }) - - t.Run("handles absolute config path", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "linuxkit", - Extended: map[string]any{ - "config": "/absolute/path/to/config.yml", - }, - } - cfg := p.parseConfig(pubCfg, "/project") - - assert.Equal(t, "/absolute/path/to/config.yml", cfg.Config) - }) -} - -func TestLinuxKitPublisher_BuildLinuxKitArgs_Good(t *testing.T) 
{ - p := NewLinuxKitPublisher() - - t.Run("builds basic args for amd64", func(t *testing.T) { - args := p.buildLinuxKitArgs("/config/server.yml", "iso", "linuxkit-1.0.0-amd64", "/output", "amd64") - - assert.Contains(t, args, "build") - assert.Contains(t, args, "--format") - assert.Contains(t, args, "iso") - assert.Contains(t, args, "--name") - assert.Contains(t, args, "linuxkit-1.0.0-amd64") - assert.Contains(t, args, "--dir") - assert.Contains(t, args, "/output") - assert.Contains(t, args, "/config/server.yml") - // Should not contain --arch for amd64 (default) - assert.NotContains(t, args, "--arch") - }) - - t.Run("builds args with arch for arm64", func(t *testing.T) { - args := p.buildLinuxKitArgs("/config/server.yml", "qcow2", "linuxkit-1.0.0-arm64", "/output", "arm64") - - assert.Contains(t, args, "--arch") - assert.Contains(t, args, "arm64") - assert.Contains(t, args, "qcow2") - }) -} - -func TestLinuxKitPublisher_BuildBaseName_Good(t *testing.T) { - p := NewLinuxKitPublisher() - - t.Run("strips v prefix", func(t *testing.T) { - name := p.buildBaseName("v1.2.3") - assert.Equal(t, "linuxkit-1.2.3", name) - }) - - t.Run("handles version without v prefix", func(t *testing.T) { - name := p.buildBaseName("1.2.3") - assert.Equal(t, "linuxkit-1.2.3", name) - }) -} - -func TestLinuxKitPublisher_GetArtifactPath_Good(t *testing.T) { - p := NewLinuxKitPublisher() - - tests := []struct { - name string - outputDir string - outputName string - format string - expected string - }{ - { - name: "ISO format", - outputDir: "/dist/linuxkit", - outputName: "linuxkit-1.0.0-amd64", - format: "iso", - expected: "/dist/linuxkit/linuxkit-1.0.0-amd64.iso", - }, - { - name: "raw format", - outputDir: "/dist/linuxkit", - outputName: "linuxkit-1.0.0-amd64", - format: "raw", - expected: "/dist/linuxkit/linuxkit-1.0.0-amd64.raw", - }, - { - name: "qcow2 format", - outputDir: "/dist/linuxkit", - outputName: "linuxkit-1.0.0-arm64", - format: "qcow2", - expected: 
"/dist/linuxkit/linuxkit-1.0.0-arm64.qcow2", - }, - { - name: "vmdk format", - outputDir: "/dist/linuxkit", - outputName: "linuxkit-1.0.0-amd64", - format: "vmdk", - expected: "/dist/linuxkit/linuxkit-1.0.0-amd64.vmdk", - }, - { - name: "gcp format", - outputDir: "/dist/linuxkit", - outputName: "linuxkit-1.0.0-amd64", - format: "gcp", - expected: "/dist/linuxkit/linuxkit-1.0.0-amd64.img.tar.gz", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - path := p.getArtifactPath(tc.outputDir, tc.outputName, tc.format) - assert.Equal(t, tc.expected, path) - }) - } -} - -func TestLinuxKitPublisher_GetFormatExtension_Good(t *testing.T) { - p := NewLinuxKitPublisher() - - tests := []struct { - format string - expected string - }{ - {"iso", ".iso"}, - {"raw", ".raw"}, - {"qcow2", ".qcow2"}, - {"vmdk", ".vmdk"}, - {"vhd", ".vhd"}, - {"gcp", ".img.tar.gz"}, - {"aws", ".raw"}, - {"unknown", ".unknown"}, - } - - for _, tc := range tests { - t.Run(tc.format, func(t *testing.T) { - ext := p.getFormatExtension(tc.format) - assert.Equal(t, tc.expected, ext) - }) - } -} - -func TestLinuxKitPublisher_Publish_Bad(t *testing.T) { - p := NewLinuxKitPublisher() - - t.Run("fails when config file not found with linuxkit installed", func(t *testing.T) { - if err := validateLinuxKitCli(); err != nil { - t.Skip("skipping test: linuxkit CLI not available") - } - - release := &Release{ - Version: "v1.0.0", - ProjectDir: "/nonexistent", - } - pubCfg := PublisherConfig{ - Type: "linuxkit", - Extended: map[string]any{ - "config": "/nonexistent/config.yml", - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err := p.Publish(nil, release, pubCfg, relCfg, false) - assert.Error(t, err) - assert.Contains(t, err.Error(), "config file not found") - }) - - t.Run("fails when linuxkit CLI not available", func(t *testing.T) { - if err := validateLinuxKitCli(); err == nil { - t.Skip("skipping test: linuxkit CLI is available") - } - - release := &Release{ - Version: 
"v1.0.0", - ProjectDir: "/tmp", - } - pubCfg := PublisherConfig{Type: "linuxkit"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err := p.Publish(nil, release, pubCfg, relCfg, false) - assert.Error(t, err) - assert.Contains(t, err.Error(), "linuxkit CLI not found") - }) - - t.Run("fails when repository cannot be detected and not provided", func(t *testing.T) { - if err := validateLinuxKitCli(); err != nil { - t.Skip("skipping test: linuxkit CLI not available") - } - - // Create temp directory that is NOT a git repo - tmpDir, err := os.MkdirTemp("", "linuxkit-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - // Create a config file - configPath := filepath.Join(tmpDir, "config.yml") - err = os.WriteFile(configPath, []byte("kernel:\n image: test\n"), 0644) - require.NoError(t, err) - - release := &Release{ - Version: "v1.0.0", - ProjectDir: tmpDir, - } - pubCfg := PublisherConfig{ - Type: "linuxkit", - Extended: map[string]any{ - "config": "config.yml", - }, - } - relCfg := &mockReleaseConfig{repository: ""} // Empty repository - - err = p.Publish(nil, release, pubCfg, relCfg, true) - assert.Error(t, err) - assert.Contains(t, err.Error(), "could not determine repository") - }) -} - -func TestValidateLinuxKitCli_Good(t *testing.T) { - t.Run("returns expected error when linuxkit not installed", func(t *testing.T) { - err := validateLinuxKitCli() - if err != nil { - // LinuxKit is not installed - assert.Contains(t, err.Error(), "linuxkit CLI not found") - } - // If err is nil, linuxkit is installed - that's OK - }) -} - -func TestLinuxKitPublisher_Publish_WithCLI_Good(t *testing.T) { - // These tests run only when linuxkit CLI is available - if err := validateLinuxKitCli(); err != nil { - t.Skip("skipping test: linuxkit CLI not available") - } - - p := NewLinuxKitPublisher() - - t.Run("succeeds with dry run and valid config", func(t *testing.T) { - tmpDir, err := os.MkdirTemp("", "linuxkit-test") - require.NoError(t, err) - defer 
os.RemoveAll(tmpDir) - - // Create config directory and file - configDir := filepath.Join(tmpDir, ".core", "linuxkit") - err = os.MkdirAll(configDir, 0755) - require.NoError(t, err) - - configPath := filepath.Join(configDir, "server.yml") - err = os.WriteFile(configPath, []byte("kernel:\n image: linuxkit/kernel:5.10\n"), 0644) - require.NoError(t, err) - - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v1.0.0", - ProjectDir: tmpDir, - } - pubCfg := PublisherConfig{Type: "linuxkit"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err = p.Publish(nil, release, pubCfg, relCfg, true) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - assert.Contains(t, output, "DRY RUN: LinuxKit Build & Publish") - }) - - t.Run("fails with missing config file", func(t *testing.T) { - tmpDir, err := os.MkdirTemp("", "linuxkit-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - release := &Release{ - Version: "v1.0.0", - ProjectDir: tmpDir, - } - pubCfg := PublisherConfig{Type: "linuxkit"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err = p.Publish(nil, release, pubCfg, relCfg, false) - assert.Error(t, err) - assert.Contains(t, err.Error(), "config file not found") - }) - - t.Run("uses relCfg repository", func(t *testing.T) { - tmpDir, err := os.MkdirTemp("", "linuxkit-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - configDir := filepath.Join(tmpDir, ".core", "linuxkit") - err = os.MkdirAll(configDir, 0755) - require.NoError(t, err) - - configPath := filepath.Join(configDir, "server.yml") - err = os.WriteFile(configPath, []byte("kernel:\n image: test\n"), 0644) - require.NoError(t, err) - - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v1.0.0", - ProjectDir: tmpDir, - } - pubCfg := PublisherConfig{Type: "linuxkit"} - relCfg := 
&mockReleaseConfig{repository: "custom-owner/custom-repo"} - - err = p.Publish(nil, release, pubCfg, relCfg, true) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - assert.Contains(t, output, "custom-owner/custom-repo") - }) - - t.Run("detects repository when not provided", func(t *testing.T) { - tmpDir, err := os.MkdirTemp("", "linuxkit-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - // Create config file - configDir := filepath.Join(tmpDir, ".core", "linuxkit") - err = os.MkdirAll(configDir, 0755) - require.NoError(t, err) - - configPath := filepath.Join(configDir, "server.yml") - err = os.WriteFile(configPath, []byte("kernel:\n image: test\n"), 0644) - require.NoError(t, err) - - // Initialize git repo - cmd := exec.Command("git", "init") - cmd.Dir = tmpDir - require.NoError(t, cmd.Run()) - - cmd = exec.Command("git", "remote", "add", "origin", "git@github.com:detected-owner/detected-repo.git") - cmd.Dir = tmpDir - require.NoError(t, cmd.Run()) - - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v1.0.0", - ProjectDir: tmpDir, - } - pubCfg := PublisherConfig{Type: "linuxkit"} - relCfg := &mockReleaseConfig{repository: ""} // Empty to trigger detection - - err = p.Publish(nil, release, pubCfg, relCfg, true) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - assert.Contains(t, output, "detected-owner/detected-repo") - }) -} - -func TestLinuxKitPublisher_Publish_NilRelCfg_Good(t *testing.T) { - if err := validateLinuxKitCli(); err != nil { - t.Skip("skipping test: linuxkit CLI not available") - } - - p := NewLinuxKitPublisher() - - t.Run("handles nil relCfg by detecting repo", func(t *testing.T) { - tmpDir, err := os.MkdirTemp("", "linuxkit-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - // Create 
config file - configDir := filepath.Join(tmpDir, ".core", "linuxkit") - err = os.MkdirAll(configDir, 0755) - require.NoError(t, err) - - configPath := filepath.Join(configDir, "server.yml") - err = os.WriteFile(configPath, []byte("kernel:\n image: test\n"), 0644) - require.NoError(t, err) - - // Initialize git repo - cmd := exec.Command("git", "init") - cmd.Dir = tmpDir - require.NoError(t, cmd.Run()) - - cmd = exec.Command("git", "remote", "add", "origin", "git@github.com:nil-owner/nil-repo.git") - cmd.Dir = tmpDir - require.NoError(t, cmd.Run()) - - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v1.0.0", - ProjectDir: tmpDir, - } - pubCfg := PublisherConfig{Type: "linuxkit"} - - err = p.Publish(nil, release, pubCfg, nil, true) // nil relCfg - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - assert.Contains(t, output, "nil-owner/nil-repo") - }) -} - -// mockReleaseConfig implements ReleaseConfig for testing. 
-type mockReleaseConfig struct { - repository string - projectName string -} - -func (m *mockReleaseConfig) GetRepository() string { - return m.repository -} - -func (m *mockReleaseConfig) GetProjectName() string { - return m.projectName -} - -func TestLinuxKitPublisher_DryRunPublish_Good(t *testing.T) { - p := NewLinuxKitPublisher() - - t.Run("outputs expected dry run information", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v1.0.0", - ProjectDir: "/project", - } - cfg := LinuxKitConfig{ - Config: "/project/.core/linuxkit/server.yml", - Formats: []string{"iso", "qcow2"}, - Platforms: []string{"linux/amd64", "linux/arm64"}, - } - - err := p.dryRunPublish(release, cfg, "owner/repo") - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - - assert.Contains(t, output, "DRY RUN: LinuxKit Build & Publish") - assert.Contains(t, output, "Repository: owner/repo") - assert.Contains(t, output, "Version: v1.0.0") - assert.Contains(t, output, "Config: /project/.core/linuxkit/server.yml") - assert.Contains(t, output, "Formats: iso, qcow2") - assert.Contains(t, output, "Platforms: linux/amd64, linux/arm64") - assert.Contains(t, output, "Would execute commands:") - assert.Contains(t, output, "linuxkit build") - assert.Contains(t, output, "Would upload artifacts to release:") - assert.Contains(t, output, "linuxkit-1.0.0-amd64.iso") - assert.Contains(t, output, "linuxkit-1.0.0-amd64.qcow2") - assert.Contains(t, output, "linuxkit-1.0.0-arm64.iso") - assert.Contains(t, output, "linuxkit-1.0.0-arm64.qcow2") - assert.Contains(t, output, "END DRY RUN") - }) - - t.Run("shows docker format usage hint", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v1.0.0", - ProjectDir: "/project", - } - cfg := LinuxKitConfig{ - Config: "/config.yml", - Formats: 
[]string{"docker"}, - Platforms: []string{"linux/amd64"}, - } - - err := p.dryRunPublish(release, cfg, "owner/repo") - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - - assert.Contains(t, output, "linuxkit-1.0.0-amd64.docker.tar") - assert.Contains(t, output, "Usage: docker load <") - }) - - t.Run("handles single platform and format", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v2.0.0", - ProjectDir: "/project", - } - cfg := LinuxKitConfig{ - Config: "/config.yml", - Formats: []string{"iso"}, - Platforms: []string{"linux/amd64"}, - } - - err := p.dryRunPublish(release, cfg, "owner/repo") - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - - assert.Contains(t, output, "linuxkit-2.0.0-amd64.iso") - assert.NotContains(t, output, "arm64") - }) -} - -func TestLinuxKitPublisher_GetFormatExtension_AllFormats_Good(t *testing.T) { - p := NewLinuxKitPublisher() - - tests := []struct { - format string - expected string - }{ - {"iso", ".iso"}, - {"iso-bios", ".iso"}, - {"iso-efi", ".iso"}, - {"raw", ".raw"}, - {"raw-bios", ".raw"}, - {"raw-efi", ".raw"}, - {"qcow2", ".qcow2"}, - {"qcow2-bios", ".qcow2"}, - {"qcow2-efi", ".qcow2"}, - {"vmdk", ".vmdk"}, - {"vhd", ".vhd"}, - {"gcp", ".img.tar.gz"}, - {"aws", ".raw"}, - {"docker", ".docker.tar"}, - {"tar", ".tar"}, - {"kernel+initrd", "-initrd.img"}, - {"custom--format", ".custom--format"}, - } - - for _, tc := range tests { - t.Run(tc.format, func(t *testing.T) { - ext := p.getFormatExtension(tc.format) - assert.Equal(t, tc.expected, ext) - }) - } -} - -func TestLinuxKitPublisher_BuildLinuxKitArgs_AllArchitectures_Good(t *testing.T) { - p := NewLinuxKitPublisher() - - t.Run("amd64 does not include arch flag", func(t *testing.T) { - args := p.buildLinuxKitArgs("/config.yml", 
"iso", "output--name", "/output", "amd64") - - assert.Contains(t, args, "build") - assert.Contains(t, args, "--format") - assert.Contains(t, args, "iso") - assert.Contains(t, args, "--name") - assert.Contains(t, args, "output--name") - assert.Contains(t, args, "--dir") - assert.Contains(t, args, "/output") - assert.Contains(t, args, "/config.yml") - assert.NotContains(t, args, "--arch") - }) - - t.Run("arm64 includes arch flag", func(t *testing.T) { - args := p.buildLinuxKitArgs("/config.yml", "qcow2", "output--name", "/output", "arm64") - - assert.Contains(t, args, "--arch") - assert.Contains(t, args, "arm64") - }) - - t.Run("other architectures include arch flag", func(t *testing.T) { - args := p.buildLinuxKitArgs("/config.yml", "raw", "output--name", "/output", "riscv64") - - assert.Contains(t, args, "--arch") - assert.Contains(t, args, "riscv64") - }) -} - -func TestLinuxKitPublisher_ParseConfig_EdgeCases_Good(t *testing.T) { - p := NewLinuxKitPublisher() - - t.Run("handles nil extended config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "linuxkit", - Extended: nil, - } - - cfg := p.parseConfig(pubCfg, "/project") - - assert.Equal(t, "/project/.core/linuxkit/server.yml", cfg.Config) - assert.Equal(t, []string{"iso"}, cfg.Formats) - assert.Equal(t, []string{"linux/amd64"}, cfg.Platforms) - }) - - t.Run("handles empty extended config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "linuxkit", - Extended: map[string]any{}, - } - - cfg := p.parseConfig(pubCfg, "/project") - - assert.Equal(t, "/project/.core/linuxkit/server.yml", cfg.Config) - assert.Equal(t, []string{"iso"}, cfg.Formats) - assert.Equal(t, []string{"linux/amd64"}, cfg.Platforms) - }) - - t.Run("handles mixed format types in extended config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "linuxkit", - Extended: map[string]any{ - "formats": []any{"iso", 123, "qcow2"}, // includes non-string - }, - } - - cfg := p.parseConfig(pubCfg, "/project") - - assert.Equal(t, 
[]string{"iso", "qcow2"}, cfg.Formats) - }) - - t.Run("handles mixed platform types in extended config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "linuxkit", - Extended: map[string]any{ - "platforms": []any{"linux/amd64", nil, "linux/arm64"}, - }, - } - - cfg := p.parseConfig(pubCfg, "/project") - - assert.Equal(t, []string{"linux/amd64", "linux/arm64"}, cfg.Platforms) - }) -} - -func TestLinuxKitPublisher_BuildBaseName_EdgeCases_Good(t *testing.T) { - p := NewLinuxKitPublisher() - - tests := []struct { - name string - version string - expected string - }{ - {"strips v prefix", "v1.2.3", "linuxkit-1.2.3"}, - {"no v prefix", "1.2.3", "linuxkit-1.2.3"}, - {"prerelease version", "v1.0.0-alpha.1", "linuxkit-1.0.0-alpha.1"}, - {"build metadata", "v1.0.0+build.123", "linuxkit-1.0.0+build.123"}, - {"only v", "v", "linuxkit-"}, - {"empty string", "", "linuxkit-"}, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - name := p.buildBaseName(tc.version) - assert.Equal(t, tc.expected, name) - }) - } -} - -func TestLinuxKitPublisher_GetArtifactPath_AllFormats_Good(t *testing.T) { - p := NewLinuxKitPublisher() - - tests := []struct { - name string - outputDir string - outputName string - format string - expected string - }{ - { - name: "ISO format", - outputDir: "/dist", - outputName: "linuxkit-1.0.0-amd64", - format: "iso", - expected: "/dist/linuxkit-1.0.0-amd64.iso", - }, - { - name: "ISO-BIOS format", - outputDir: "/dist", - outputName: "linuxkit-1.0.0-amd64", - format: "iso-bios", - expected: "/dist/linuxkit-1.0.0-amd64.iso", - }, - { - name: "docker format", - outputDir: "/output", - outputName: "linuxkit-2.0.0-arm64", - format: "docker", - expected: "/output/linuxkit-2.0.0-arm64.docker.tar", - }, - { - name: "tar format", - outputDir: "/output", - outputName: "linuxkit-1.0.0", - format: "tar", - expected: "/output/linuxkit-1.0.0.tar", - }, - { - name: "kernel+initrd format", - outputDir: "/output", - outputName: "linuxkit-1.0.0", - 
format: "kernel+initrd", - expected: "/output/linuxkit-1.0.0-initrd.img", - }, - { - name: "GCP format", - outputDir: "/output", - outputName: "linuxkit-1.0.0", - format: "gcp", - expected: "/output/linuxkit-1.0.0.img.tar.gz", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - path := p.getArtifactPath(tc.outputDir, tc.outputName, tc.format) - assert.Equal(t, tc.expected, path) - }) - } -} - -func TestLinuxKitPublisher_Publish_DryRun_Good(t *testing.T) { - if testing.Short() { - t.Skip("skipping integration test in short mode") - } - - // Skip if linuxkit CLI is not available - if err := validateLinuxKitCli(); err != nil { - t.Skip("skipping test: linuxkit CLI not available") - } - - p := NewLinuxKitPublisher() - - t.Run("dry run succeeds with valid config file", func(t *testing.T) { - // Create temp directory with config file - tmpDir, err := os.MkdirTemp("", "linuxkit-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - configDir := filepath.Join(tmpDir, ".core", "linuxkit") - err = os.MkdirAll(configDir, 0755) - require.NoError(t, err) - - configPath := filepath.Join(configDir, "server.yml") - err = os.WriteFile(configPath, []byte("kernel:\n image: linuxkit/kernel:5.10\n"), 0644) - require.NoError(t, err) - - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v1.0.0", - ProjectDir: tmpDir, - } - pubCfg := PublisherConfig{Type: "linuxkit"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err = p.Publish(nil, release, pubCfg, relCfg, true) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - assert.Contains(t, output, "DRY RUN: LinuxKit Build & Publish") - }) - - t.Run("dry run uses custom config path", func(t *testing.T) { - tmpDir, err := os.MkdirTemp("", "linuxkit-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - customConfigPath := filepath.Join(tmpDir, 
"custom-config.yml") - err = os.WriteFile(customConfigPath, []byte("kernel:\n image: custom\n"), 0644) - require.NoError(t, err) - - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v1.0.0", - ProjectDir: tmpDir, - } - pubCfg := PublisherConfig{ - Type: "linuxkit", - Extended: map[string]any{ - "config": customConfigPath, - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err = p.Publish(nil, release, pubCfg, relCfg, true) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - assert.Contains(t, output, "custom-config.yml") - }) - - t.Run("dry run with multiple formats and platforms", func(t *testing.T) { - tmpDir, err := os.MkdirTemp("", "linuxkit-test") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - configPath := filepath.Join(tmpDir, "config.yml") - err = os.WriteFile(configPath, []byte("kernel:\n image: test\n"), 0644) - require.NoError(t, err) - - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - release := &Release{ - Version: "v2.0.0", - ProjectDir: tmpDir, - } - pubCfg := PublisherConfig{ - Type: "linuxkit", - Extended: map[string]any{ - "config": "config.yml", - "formats": []any{"iso", "qcow2", "vmdk"}, - "platforms": []any{"linux/amd64", "linux/arm64"}, - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err = p.Publish(nil, release, pubCfg, relCfg, true) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - - // Check all format/platform combinations are listed - assert.Contains(t, output, "linuxkit-2.0.0-amd64.iso") - assert.Contains(t, output, "linuxkit-2.0.0-amd64.qcow2") - assert.Contains(t, output, "linuxkit-2.0.0-amd64.vmdk") - assert.Contains(t, output, "linuxkit-2.0.0-arm64.iso") - assert.Contains(t, output, "linuxkit-2.0.0-arm64.qcow2") - assert.Contains(t, 
output, "linuxkit-2.0.0-arm64.vmdk") - }) -} diff --git a/pkg/release/publishers/npm.go b/pkg/release/publishers/npm.go deleted file mode 100644 index 9718698..0000000 --- a/pkg/release/publishers/npm.go +++ /dev/null @@ -1,248 +0,0 @@ -// Package publishers provides release publishing implementations. -package publishers - -import ( - "bytes" - "context" - "embed" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - "text/template" -) - -//go:embed templates/npm/*.tmpl -var npmTemplates embed.FS - -// NpmConfig holds npm-specific configuration. -type NpmConfig struct { - // Package is the npm package name (e.g., "@host-uk/core"). - Package string - // Access is the npm access level: "public" or "restricted". - Access string -} - -// NpmPublisher publishes releases to npm using the binary wrapper pattern. -type NpmPublisher struct{} - -// NewNpmPublisher creates a new npm publisher. -func NewNpmPublisher() *NpmPublisher { - return &NpmPublisher{} -} - -// Name returns the publisher's identifier. -func (p *NpmPublisher) Name() string { - return "npm" -} - -// Publish publishes the release to npm. -// It generates a binary wrapper package that downloads the correct platform binary on postinstall. 
-func (p *NpmPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error { - // Parse npm config - npmCfg := p.parseConfig(pubCfg, relCfg) - - // Validate configuration - if npmCfg.Package == "" { - return fmt.Errorf("npm.Publish: package name is required (set publish.npm.package in config)") - } - - // Get repository - repo := "" - if relCfg != nil { - repo = relCfg.GetRepository() - } - if repo == "" { - detectedRepo, err := detectRepository(release.ProjectDir) - if err != nil { - return fmt.Errorf("npm.Publish: could not determine repository: %w", err) - } - repo = detectedRepo - } - - // Get project name (binary name) - projectName := "" - if relCfg != nil { - projectName = relCfg.GetProjectName() - } - if projectName == "" { - // Try to infer from package name - parts := strings.Split(npmCfg.Package, "/") - projectName = parts[len(parts)-1] - } - - // Strip leading 'v' from version for npm - version := strings.TrimPrefix(release.Version, "v") - - // Template data - data := npmTemplateData{ - Package: npmCfg.Package, - Version: version, - Description: fmt.Sprintf("%s CLI", projectName), - License: "MIT", - Repository: repo, - BinaryName: projectName, - ProjectName: projectName, - Access: npmCfg.Access, - } - - if dryRun { - return p.dryRunPublish(data, &npmCfg) - } - - return p.executePublish(ctx, data, &npmCfg) -} - -// parseConfig extracts npm-specific configuration from the publisher config. -func (p *NpmPublisher) parseConfig(pubCfg PublisherConfig, relCfg ReleaseConfig) NpmConfig { - cfg := NpmConfig{ - Package: "", - Access: "public", - } - - // Override from extended config if present - if ext, ok := pubCfg.Extended.(map[string]any); ok { - if pkg, ok := ext["package"].(string); ok && pkg != "" { - cfg.Package = pkg - } - if access, ok := ext["access"].(string); ok && access != "" { - cfg.Access = access - } - } - - return cfg -} - -// npmTemplateData holds data for npm templates. 
-type npmTemplateData struct { - Package string - Version string - Description string - License string - Repository string - BinaryName string - ProjectName string - Access string -} - -// dryRunPublish shows what would be done without actually publishing. -func (p *NpmPublisher) dryRunPublish(data npmTemplateData, cfg *NpmConfig) error { - fmt.Println() - fmt.Println("=== DRY RUN: npm Publish ===") - fmt.Println() - fmt.Printf("Package: %s\n", data.Package) - fmt.Printf("Version: %s\n", data.Version) - fmt.Printf("Access: %s\n", data.Access) - fmt.Printf("Repository: %s\n", data.Repository) - fmt.Printf("Binary: %s\n", data.BinaryName) - fmt.Println() - - // Generate and show package.json - pkgJSON, err := p.renderTemplate("templates/npm/package.json.tmpl", data) - if err != nil { - return fmt.Errorf("npm.dryRunPublish: %w", err) - } - fmt.Println("Generated package.json:") - fmt.Println("---") - fmt.Println(pkgJSON) - fmt.Println("---") - fmt.Println() - - fmt.Println("Would run: npm publish --access", data.Access) - fmt.Println() - fmt.Println("=== END DRY RUN ===") - - return nil -} - -// executePublish actually creates and publishes the npm package. 
-func (p *NpmPublisher) executePublish(ctx context.Context, data npmTemplateData, cfg *NpmConfig) error { - // Check for NPM_TOKEN - if os.Getenv("NPM_TOKEN") == "" { - return fmt.Errorf("npm.Publish: NPM_TOKEN environment variable is required") - } - - // Create temp directory for package - tmpDir, err := os.MkdirTemp("", "npm-publish-*") - if err != nil { - return fmt.Errorf("npm.Publish: failed to create temp directory: %w", err) - } - defer os.RemoveAll(tmpDir) - - // Create bin directory - binDir := filepath.Join(tmpDir, "bin") - if err := os.MkdirAll(binDir, 0755); err != nil { - return fmt.Errorf("npm.Publish: failed to create bin directory: %w", err) - } - - // Generate package.json - pkgJSON, err := p.renderTemplate("templates/npm/package.json.tmpl", data) - if err != nil { - return fmt.Errorf("npm.Publish: failed to render package.json: %w", err) - } - if err := os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(pkgJSON), 0644); err != nil { - return fmt.Errorf("npm.Publish: failed to write package.json: %w", err) - } - - // Generate install.js - installJS, err := p.renderTemplate("templates/npm/install.js.tmpl", data) - if err != nil { - return fmt.Errorf("npm.Publish: failed to render install.js: %w", err) - } - if err := os.WriteFile(filepath.Join(tmpDir, "install.js"), []byte(installJS), 0644); err != nil { - return fmt.Errorf("npm.Publish: failed to write install.js: %w", err) - } - - // Generate run.js - runJS, err := p.renderTemplate("templates/npm/run.js.tmpl", data) - if err != nil { - return fmt.Errorf("npm.Publish: failed to render run.js: %w", err) - } - if err := os.WriteFile(filepath.Join(binDir, "run.js"), []byte(runJS), 0755); err != nil { - return fmt.Errorf("npm.Publish: failed to write run.js: %w", err) - } - - // Create .npmrc with token - npmrc := fmt.Sprintf("//registry.npmjs.org/:_authToken=${NPM_TOKEN}\n") - if err := os.WriteFile(filepath.Join(tmpDir, ".npmrc"), []byte(npmrc), 0600); err != nil { - return 
fmt.Errorf("npm.Publish: failed to write .npmrc: %w", err) - } - - // Run npm publish - cmd := exec.CommandContext(ctx, "npm", "publish", "--access", data.Access) - cmd.Dir = tmpDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - cmd.Env = append(os.Environ(), "NPM_TOKEN="+os.Getenv("NPM_TOKEN")) - - fmt.Printf("Publishing %s@%s to npm...\n", data.Package, data.Version) - if err := cmd.Run(); err != nil { - return fmt.Errorf("npm.Publish: npm publish failed: %w", err) - } - - fmt.Printf("Published %s@%s to npm\n", data.Package, data.Version) - fmt.Printf(" https://www.npmjs.com/package/%s\n", data.Package) - - return nil -} - -// renderTemplate renders an embedded template with the given data. -func (p *NpmPublisher) renderTemplate(name string, data npmTemplateData) (string, error) { - content, err := npmTemplates.ReadFile(name) - if err != nil { - return "", fmt.Errorf("failed to read template %s: %w", name, err) - } - - tmpl, err := template.New(filepath.Base(name)).Parse(string(content)) - if err != nil { - return "", fmt.Errorf("failed to parse template %s: %w", name, err) - } - - var buf bytes.Buffer - if err := tmpl.Execute(&buf, data); err != nil { - return "", fmt.Errorf("failed to execute template %s: %w", name, err) - } - - return buf.String(), nil -} diff --git a/pkg/release/publishers/npm_test.go b/pkg/release/publishers/npm_test.go deleted file mode 100644 index b726ee4..0000000 --- a/pkg/release/publishers/npm_test.go +++ /dev/null @@ -1,298 +0,0 @@ -package publishers - -import ( - "bytes" - "os" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestNpmPublisher_Name_Good(t *testing.T) { - t.Run("returns npm", func(t *testing.T) { - p := NewNpmPublisher() - assert.Equal(t, "npm", p.Name()) - }) -} - -func TestNpmPublisher_ParseConfig_Good(t *testing.T) { - p := NewNpmPublisher() - - t.Run("uses defaults when no extended config", func(t *testing.T) { - pubCfg := PublisherConfig{Type: "npm"} - 
relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Empty(t, cfg.Package) - assert.Equal(t, "public", cfg.Access) - }) - - t.Run("parses package and access from extended config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "npm", - Extended: map[string]any{ - "package": "@myorg/mypackage", - "access": "restricted", - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Equal(t, "@myorg/mypackage", cfg.Package) - assert.Equal(t, "restricted", cfg.Access) - }) - - t.Run("keeps default access when not specified", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "npm", - Extended: map[string]any{ - "package": "@myorg/mypackage", - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Equal(t, "@myorg/mypackage", cfg.Package) - assert.Equal(t, "public", cfg.Access) - }) - - t.Run("handles nil extended config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "npm", - Extended: nil, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Empty(t, cfg.Package) - assert.Equal(t, "public", cfg.Access) - }) - - t.Run("handles empty strings in config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "npm", - Extended: map[string]any{ - "package": "", - "access": "", - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Empty(t, cfg.Package) - assert.Equal(t, "public", cfg.Access) - }) -} - -func TestNpmPublisher_RenderTemplate_Good(t *testing.T) { - p := NewNpmPublisher() - - t.Run("renders package.json template with data", func(t *testing.T) { - data := npmTemplateData{ - Package: "@myorg/mycli", - Version: "1.2.3", - Description: "My awesome CLI", - License: "MIT", - Repository: "owner/myapp", - BinaryName: "myapp", - ProjectName: "myapp", - 
Access: "public", - } - - result, err := p.renderTemplate("templates/npm/package.json.tmpl", data) - require.NoError(t, err) - - assert.Contains(t, result, `"name": "@myorg/mycli"`) - assert.Contains(t, result, `"version": "1.2.3"`) - assert.Contains(t, result, `"description": "My awesome CLI"`) - assert.Contains(t, result, `"license": "MIT"`) - assert.Contains(t, result, "owner/myapp") - assert.Contains(t, result, `"myapp": "./bin/run.js"`) - assert.Contains(t, result, `"access": "public"`) - }) - - t.Run("renders restricted access correctly", func(t *testing.T) { - data := npmTemplateData{ - Package: "@private/cli", - Version: "1.0.0", - Description: "Private CLI", - License: "MIT", - Repository: "org/repo", - BinaryName: "cli", - ProjectName: "cli", - Access: "restricted", - } - - result, err := p.renderTemplate("templates/npm/package.json.tmpl", data) - require.NoError(t, err) - - assert.Contains(t, result, `"access": "restricted"`) - }) -} - -func TestNpmPublisher_RenderTemplate_Bad(t *testing.T) { - p := NewNpmPublisher() - - t.Run("returns error for non-existent template", func(t *testing.T) { - data := npmTemplateData{} - _, err := p.renderTemplate("templates/npm/nonexistent.tmpl", data) - assert.Error(t, err) - assert.Contains(t, err.Error(), "failed to read template") - }) -} - -func TestNpmPublisher_DryRunPublish_Good(t *testing.T) { - p := NewNpmPublisher() - - t.Run("outputs expected dry run information", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - data := npmTemplateData{ - Package: "@myorg/mycli", - Version: "1.0.0", - Access: "public", - Repository: "owner/repo", - BinaryName: "mycli", - Description: "My CLI", - } - cfg := &NpmConfig{ - Package: "@myorg/mycli", - Access: "public", - } - - err := p.dryRunPublish(data, cfg) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - - assert.Contains(t, output, "DRY RUN: npm 
Publish") - assert.Contains(t, output, "Package: @myorg/mycli") - assert.Contains(t, output, "Version: 1.0.0") - assert.Contains(t, output, "Access: public") - assert.Contains(t, output, "Repository: owner/repo") - assert.Contains(t, output, "Binary: mycli") - assert.Contains(t, output, "Generated package.json:") - assert.Contains(t, output, "Would run: npm publish --access public") - assert.Contains(t, output, "END DRY RUN") - }) - - t.Run("shows restricted access correctly", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - data := npmTemplateData{ - Package: "@private/cli", - Version: "2.0.0", - Access: "restricted", - Repository: "org/repo", - BinaryName: "cli", - } - cfg := &NpmConfig{ - Package: "@private/cli", - Access: "restricted", - } - - err := p.dryRunPublish(data, cfg) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - - assert.Contains(t, output, "Access: restricted") - assert.Contains(t, output, "Would run: npm publish --access restricted") - }) -} - -func TestNpmPublisher_Publish_Bad(t *testing.T) { - p := NewNpmPublisher() - - t.Run("fails when package name not configured", func(t *testing.T) { - release := &Release{ - Version: "v1.0.0", - ProjectDir: "/project", - } - pubCfg := PublisherConfig{Type: "npm"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err := p.Publish(nil, release, pubCfg, relCfg, false) - assert.Error(t, err) - assert.Contains(t, err.Error(), "package name is required") - }) - - t.Run("fails when NPM_TOKEN not set in non-dry-run", func(t *testing.T) { - // Ensure NPM_TOKEN is not set - oldToken := os.Getenv("NPM_TOKEN") - os.Unsetenv("NPM_TOKEN") - defer func() { - if oldToken != "" { - os.Setenv("NPM_TOKEN", oldToken) - } - }() - - release := &Release{ - Version: "v1.0.0", - ProjectDir: "/project", - } - pubCfg := PublisherConfig{ - Type: "npm", - Extended: map[string]any{ - "package": 
"@test/package", - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err := p.Publish(nil, release, pubCfg, relCfg, false) - assert.Error(t, err) - assert.Contains(t, err.Error(), "NPM_TOKEN environment variable is required") - }) -} - -func TestNpmConfig_Defaults_Good(t *testing.T) { - t.Run("has sensible defaults", func(t *testing.T) { - p := NewNpmPublisher() - pubCfg := PublisherConfig{Type: "npm"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Empty(t, cfg.Package) - assert.Equal(t, "public", cfg.Access) - }) -} - -func TestNpmTemplateData_Good(t *testing.T) { - t.Run("struct has all expected fields", func(t *testing.T) { - data := npmTemplateData{ - Package: "@myorg/package", - Version: "1.0.0", - Description: "description", - License: "MIT", - Repository: "org/repo", - BinaryName: "cli", - ProjectName: "cli", - Access: "public", - } - - assert.Equal(t, "@myorg/package", data.Package) - assert.Equal(t, "1.0.0", data.Version) - assert.Equal(t, "description", data.Description) - assert.Equal(t, "MIT", data.License) - assert.Equal(t, "org/repo", data.Repository) - assert.Equal(t, "cli", data.BinaryName) - assert.Equal(t, "cli", data.ProjectName) - assert.Equal(t, "public", data.Access) - }) -} diff --git a/pkg/release/publishers/publisher.go b/pkg/release/publishers/publisher.go deleted file mode 100644 index f91de23..0000000 --- a/pkg/release/publishers/publisher.go +++ /dev/null @@ -1,68 +0,0 @@ -// Package publishers provides release publishing implementations. -package publishers - -import ( - "context" - - "github.com/host-uk/core/pkg/build" -) - -// Release represents a release to be published. -type Release struct { - // Version is the semantic version string (e.g., "v1.2.3"). - Version string - // Artifacts are the built release artifacts. - Artifacts []build.Artifact - // Changelog is the generated markdown changelog. 
- Changelog string - // ProjectDir is the root directory of the project. - ProjectDir string -} - -// PublisherConfig holds configuration for a publisher. -type PublisherConfig struct { - // Type is the publisher type (e.g., "github", "linuxkit", "docker"). - Type string - // Prerelease marks the release as a prerelease. - Prerelease bool - // Draft creates the release as a draft. - Draft bool - // Extended holds publisher-specific configuration. - Extended any -} - -// ReleaseConfig holds release configuration needed by publishers. -type ReleaseConfig interface { - GetRepository() string - GetProjectName() string -} - -// Publisher defines the interface for release publishers. -type Publisher interface { - // Name returns the publisher's identifier. - Name() string - // Publish publishes the release to the target. - // If dryRun is true, it prints what would be done without executing. - Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error -} - -// NewRelease creates a Release from the release package's Release type. -// This is a helper to convert between packages. -func NewRelease(version string, artifacts []build.Artifact, changelog, projectDir string) *Release { - return &Release{ - Version: version, - Artifacts: artifacts, - Changelog: changelog, - ProjectDir: projectDir, - } -} - -// NewPublisherConfig creates a PublisherConfig. -func NewPublisherConfig(pubType string, prerelease, draft bool, extended any) PublisherConfig { - return PublisherConfig{ - Type: pubType, - Prerelease: prerelease, - Draft: draft, - Extended: extended, - } -} diff --git a/pkg/release/publishers/scoop.go b/pkg/release/publishers/scoop.go deleted file mode 100644 index 25e7ee1..0000000 --- a/pkg/release/publishers/scoop.go +++ /dev/null @@ -1,268 +0,0 @@ -// Package publishers provides release publishing implementations. 
-package publishers - -import ( - "bytes" - "context" - "embed" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - "text/template" - - "github.com/host-uk/core/pkg/build" -) - -//go:embed templates/scoop/*.tmpl -var scoopTemplates embed.FS - -// ScoopConfig holds Scoop-specific configuration. -type ScoopConfig struct { - // Bucket is the Scoop bucket repository (e.g., "host-uk/scoop-bucket"). - Bucket string - // Official config for generating files for official repo PRs. - Official *OfficialConfig -} - -// ScoopPublisher publishes releases to Scoop. -type ScoopPublisher struct{} - -// NewScoopPublisher creates a new Scoop publisher. -func NewScoopPublisher() *ScoopPublisher { - return &ScoopPublisher{} -} - -// Name returns the publisher's identifier. -func (p *ScoopPublisher) Name() string { - return "scoop" -} - -// Publish publishes the release to Scoop. -func (p *ScoopPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error { - cfg := p.parseConfig(pubCfg, relCfg) - - if cfg.Bucket == "" && (cfg.Official == nil || !cfg.Official.Enabled) { - return fmt.Errorf("scoop.Publish: bucket is required (set publish.scoop.bucket in config)") - } - - repo := "" - if relCfg != nil { - repo = relCfg.GetRepository() - } - if repo == "" { - detectedRepo, err := detectRepository(release.ProjectDir) - if err != nil { - return fmt.Errorf("scoop.Publish: could not determine repository: %w", err) - } - repo = detectedRepo - } - - projectName := "" - if relCfg != nil { - projectName = relCfg.GetProjectName() - } - if projectName == "" { - parts := strings.Split(repo, "/") - projectName = parts[len(parts)-1] - } - - version := strings.TrimPrefix(release.Version, "v") - checksums := buildChecksumMap(release.Artifacts) - - data := scoopTemplateData{ - PackageName: projectName, - Description: fmt.Sprintf("%s CLI", projectName), - Repository: repo, - Version: version, - License: "MIT", - BinaryName: projectName, - 
Checksums: checksums, - } - - if dryRun { - return p.dryRunPublish(data, cfg) - } - - return p.executePublish(ctx, release.ProjectDir, data, cfg) -} - -type scoopTemplateData struct { - PackageName string - Description string - Repository string - Version string - License string - BinaryName string - Checksums ChecksumMap -} - -func (p *ScoopPublisher) parseConfig(pubCfg PublisherConfig, relCfg ReleaseConfig) ScoopConfig { - cfg := ScoopConfig{} - - if ext, ok := pubCfg.Extended.(map[string]any); ok { - if bucket, ok := ext["bucket"].(string); ok && bucket != "" { - cfg.Bucket = bucket - } - if official, ok := ext["official"].(map[string]any); ok { - cfg.Official = &OfficialConfig{} - if enabled, ok := official["enabled"].(bool); ok { - cfg.Official.Enabled = enabled - } - if output, ok := official["output"].(string); ok { - cfg.Official.Output = output - } - } - } - - return cfg -} - -func (p *ScoopPublisher) dryRunPublish(data scoopTemplateData, cfg ScoopConfig) error { - fmt.Println() - fmt.Println("=== DRY RUN: Scoop Publish ===") - fmt.Println() - fmt.Printf("Package: %s\n", data.PackageName) - fmt.Printf("Version: %s\n", data.Version) - fmt.Printf("Bucket: %s\n", cfg.Bucket) - fmt.Printf("Repository: %s\n", data.Repository) - fmt.Println() - - manifest, err := p.renderTemplate("templates/scoop/manifest.json.tmpl", data) - if err != nil { - return fmt.Errorf("scoop.dryRunPublish: %w", err) - } - fmt.Println("Generated manifest.json:") - fmt.Println("---") - fmt.Println(manifest) - fmt.Println("---") - fmt.Println() - - if cfg.Bucket != "" { - fmt.Printf("Would commit to bucket: %s\n", cfg.Bucket) - } - if cfg.Official != nil && cfg.Official.Enabled { - output := cfg.Official.Output - if output == "" { - output = "dist/scoop" - } - fmt.Printf("Would write files for official PR to: %s\n", output) - } - fmt.Println() - fmt.Println("=== END DRY RUN ===") - - return nil -} - -func (p *ScoopPublisher) executePublish(ctx context.Context, projectDir string, data 
scoopTemplateData, cfg ScoopConfig) error { - manifest, err := p.renderTemplate("templates/scoop/manifest.json.tmpl", data) - if err != nil { - return fmt.Errorf("scoop.Publish: failed to render manifest: %w", err) - } - - // If official config is enabled, write to output directory - if cfg.Official != nil && cfg.Official.Enabled { - output := cfg.Official.Output - if output == "" { - output = filepath.Join(projectDir, "dist", "scoop") - } else if !filepath.IsAbs(output) { - output = filepath.Join(projectDir, output) - } - - if err := os.MkdirAll(output, 0755); err != nil { - return fmt.Errorf("scoop.Publish: failed to create output directory: %w", err) - } - - manifestPath := filepath.Join(output, fmt.Sprintf("%s.json", data.PackageName)) - if err := os.WriteFile(manifestPath, []byte(manifest), 0644); err != nil { - return fmt.Errorf("scoop.Publish: failed to write manifest: %w", err) - } - fmt.Printf("Wrote Scoop manifest for official PR: %s\n", manifestPath) - } - - // If bucket is configured, commit to it - if cfg.Bucket != "" { - if err := p.commitToBucket(ctx, cfg.Bucket, data, manifest); err != nil { - return err - } - } - - return nil -} - -func (p *ScoopPublisher) commitToBucket(ctx context.Context, bucket string, data scoopTemplateData, manifest string) error { - tmpDir, err := os.MkdirTemp("", "scoop-bucket-*") - if err != nil { - return fmt.Errorf("scoop.Publish: failed to create temp directory: %w", err) - } - defer os.RemoveAll(tmpDir) - - fmt.Printf("Cloning bucket %s...\n", bucket) - cmd := exec.CommandContext(ctx, "gh", "repo", "clone", bucket, tmpDir, "--", "--depth=1") - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - return fmt.Errorf("scoop.Publish: failed to clone bucket: %w", err) - } - - // Ensure bucket directory exists - bucketDir := filepath.Join(tmpDir, "bucket") - if _, err := os.Stat(bucketDir); os.IsNotExist(err) { - bucketDir = tmpDir // Some repos put manifests in root - } - - manifestPath := 
filepath.Join(bucketDir, fmt.Sprintf("%s.json", data.PackageName)) - if err := os.WriteFile(manifestPath, []byte(manifest), 0644); err != nil { - return fmt.Errorf("scoop.Publish: failed to write manifest: %w", err) - } - - commitMsg := fmt.Sprintf("Update %s to %s", data.PackageName, data.Version) - - cmd = exec.CommandContext(ctx, "git", "add", ".") - cmd.Dir = tmpDir - if err := cmd.Run(); err != nil { - return fmt.Errorf("scoop.Publish: git add failed: %w", err) - } - - cmd = exec.CommandContext(ctx, "git", "commit", "-m", commitMsg) - cmd.Dir = tmpDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - return fmt.Errorf("scoop.Publish: git commit failed: %w", err) - } - - cmd = exec.CommandContext(ctx, "git", "push") - cmd.Dir = tmpDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - return fmt.Errorf("scoop.Publish: git push failed: %w", err) - } - - fmt.Printf("Updated Scoop bucket: %s\n", bucket) - return nil -} - -func (p *ScoopPublisher) renderTemplate(name string, data scoopTemplateData) (string, error) { - content, err := scoopTemplates.ReadFile(name) - if err != nil { - return "", fmt.Errorf("failed to read template %s: %w", name, err) - } - - tmpl, err := template.New(filepath.Base(name)).Parse(string(content)) - if err != nil { - return "", fmt.Errorf("failed to parse template %s: %w", name, err) - } - - var buf bytes.Buffer - if err := tmpl.Execute(&buf, data); err != nil { - return "", fmt.Errorf("failed to execute template %s: %w", name, err) - } - - return buf.String(), nil -} - -// Ensure build package is used -var _ = build.Artifact{} diff --git a/pkg/release/publishers/scoop_test.go b/pkg/release/publishers/scoop_test.go deleted file mode 100644 index 5c8d6b4..0000000 --- a/pkg/release/publishers/scoop_test.go +++ /dev/null @@ -1,307 +0,0 @@ -package publishers - -import ( - "bytes" - "os" - "testing" - - "github.com/stretchr/testify/assert" - 
"github.com/stretchr/testify/require" -) - -func TestScoopPublisher_Name_Good(t *testing.T) { - t.Run("returns scoop", func(t *testing.T) { - p := NewScoopPublisher() - assert.Equal(t, "scoop", p.Name()) - }) -} - -func TestScoopPublisher_ParseConfig_Good(t *testing.T) { - p := NewScoopPublisher() - - t.Run("uses defaults when no extended config", func(t *testing.T) { - pubCfg := PublisherConfig{Type: "scoop"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Empty(t, cfg.Bucket) - assert.Nil(t, cfg.Official) - }) - - t.Run("parses bucket from extended config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "scoop", - Extended: map[string]any{ - "bucket": "host-uk/scoop-bucket", - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Equal(t, "host-uk/scoop-bucket", cfg.Bucket) - }) - - t.Run("parses official config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "scoop", - Extended: map[string]any{ - "official": map[string]any{ - "enabled": true, - "output": "dist/scoop-manifest", - }, - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - require.NotNil(t, cfg.Official) - assert.True(t, cfg.Official.Enabled) - assert.Equal(t, "dist/scoop-manifest", cfg.Official.Output) - }) - - t.Run("handles missing official fields", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "scoop", - Extended: map[string]any{ - "official": map[string]any{}, - }, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := p.parseConfig(pubCfg, relCfg) - - require.NotNil(t, cfg.Official) - assert.False(t, cfg.Official.Enabled) - assert.Empty(t, cfg.Official.Output) - }) - - t.Run("handles nil extended config", func(t *testing.T) { - pubCfg := PublisherConfig{ - Type: "scoop", - Extended: nil, - } - relCfg := &mockReleaseConfig{repository: "owner/repo"} - cfg := 
p.parseConfig(pubCfg, relCfg) - - assert.Empty(t, cfg.Bucket) - assert.Nil(t, cfg.Official) - }) -} - -func TestScoopPublisher_RenderTemplate_Good(t *testing.T) { - p := NewScoopPublisher() - - t.Run("renders manifest template with data", func(t *testing.T) { - data := scoopTemplateData{ - PackageName: "myapp", - Description: "My awesome CLI", - Repository: "owner/myapp", - Version: "1.2.3", - License: "MIT", - BinaryName: "myapp", - Checksums: ChecksumMap{ - WindowsAmd64: "abc123", - WindowsArm64: "def456", - }, - } - - result, err := p.renderTemplate("templates/scoop/manifest.json.tmpl", data) - require.NoError(t, err) - - assert.Contains(t, result, `"version": "1.2.3"`) - assert.Contains(t, result, `"description": "My awesome CLI"`) - assert.Contains(t, result, `"homepage": "https://github.com/owner/myapp"`) - assert.Contains(t, result, `"license": "MIT"`) - assert.Contains(t, result, `"64bit"`) - assert.Contains(t, result, `"arm64"`) - assert.Contains(t, result, "myapp-windows-amd64.zip") - assert.Contains(t, result, "myapp-windows-arm64.zip") - assert.Contains(t, result, `"hash": "abc123"`) - assert.Contains(t, result, `"hash": "def456"`) - assert.Contains(t, result, `"bin": "myapp.exe"`) - }) - - t.Run("includes autoupdate configuration", func(t *testing.T) { - data := scoopTemplateData{ - PackageName: "tool", - Description: "A tool", - Repository: "org/tool", - Version: "2.0.0", - License: "Apache-2.0", - BinaryName: "tool", - Checksums: ChecksumMap{}, - } - - result, err := p.renderTemplate("templates/scoop/manifest.json.tmpl", data) - require.NoError(t, err) - - assert.Contains(t, result, `"checkver"`) - assert.Contains(t, result, `"github": "https://github.com/org/tool"`) - assert.Contains(t, result, `"autoupdate"`) - }) -} - -func TestScoopPublisher_RenderTemplate_Bad(t *testing.T) { - p := NewScoopPublisher() - - t.Run("returns error for non-existent template", func(t *testing.T) { - data := scoopTemplateData{} - _, err := 
p.renderTemplate("templates/scoop/nonexistent.tmpl", data) - assert.Error(t, err) - assert.Contains(t, err.Error(), "failed to read template") - }) -} - -func TestScoopPublisher_DryRunPublish_Good(t *testing.T) { - p := NewScoopPublisher() - - t.Run("outputs expected dry run information", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - data := scoopTemplateData{ - PackageName: "myapp", - Version: "1.0.0", - Repository: "owner/repo", - BinaryName: "myapp", - Checksums: ChecksumMap{}, - } - cfg := ScoopConfig{ - Bucket: "owner/scoop-bucket", - } - - err := p.dryRunPublish(data, cfg) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - - assert.Contains(t, output, "DRY RUN: Scoop Publish") - assert.Contains(t, output, "Package: myapp") - assert.Contains(t, output, "Version: 1.0.0") - assert.Contains(t, output, "Bucket: owner/scoop-bucket") - assert.Contains(t, output, "Repository: owner/repo") - assert.Contains(t, output, "Generated manifest.json:") - assert.Contains(t, output, "Would commit to bucket: owner/scoop-bucket") - assert.Contains(t, output, "END DRY RUN") - }) - - t.Run("shows official output path when enabled", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - data := scoopTemplateData{ - PackageName: "myapp", - Version: "1.0.0", - BinaryName: "myapp", - Checksums: ChecksumMap{}, - } - cfg := ScoopConfig{ - Official: &OfficialConfig{ - Enabled: true, - Output: "custom/scoop/path", - }, - } - - err := p.dryRunPublish(data, cfg) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - assert.Contains(t, output, "Would write files for official PR to: custom/scoop/path") - }) - - t.Run("uses default official output path when not specified", func(t *testing.T) { - oldStdout := os.Stdout - r, w, _ := os.Pipe() - 
os.Stdout = w - - data := scoopTemplateData{ - PackageName: "myapp", - Version: "1.0.0", - BinaryName: "myapp", - Checksums: ChecksumMap{}, - } - cfg := ScoopConfig{ - Official: &OfficialConfig{ - Enabled: true, - }, - } - - err := p.dryRunPublish(data, cfg) - - w.Close() - var buf bytes.Buffer - _, _ = buf.ReadFrom(r) - os.Stdout = oldStdout - - require.NoError(t, err) - output := buf.String() - assert.Contains(t, output, "Would write files for official PR to: dist/scoop") - }) -} - -func TestScoopPublisher_Publish_Bad(t *testing.T) { - p := NewScoopPublisher() - - t.Run("fails when bucket not configured and not official mode", func(t *testing.T) { - release := &Release{ - Version: "v1.0.0", - ProjectDir: "/project", - } - pubCfg := PublisherConfig{Type: "scoop"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - err := p.Publish(nil, release, pubCfg, relCfg, false) - assert.Error(t, err) - assert.Contains(t, err.Error(), "bucket is required") - }) -} - -func TestScoopConfig_Defaults_Good(t *testing.T) { - t.Run("has sensible defaults", func(t *testing.T) { - p := NewScoopPublisher() - pubCfg := PublisherConfig{Type: "scoop"} - relCfg := &mockReleaseConfig{repository: "owner/repo"} - - cfg := p.parseConfig(pubCfg, relCfg) - - assert.Empty(t, cfg.Bucket) - assert.Nil(t, cfg.Official) - }) -} - -func TestScoopTemplateData_Good(t *testing.T) { - t.Run("struct has all expected fields", func(t *testing.T) { - data := scoopTemplateData{ - PackageName: "myapp", - Description: "description", - Repository: "org/repo", - Version: "1.0.0", - License: "MIT", - BinaryName: "myapp", - Checksums: ChecksumMap{ - WindowsAmd64: "hash1", - WindowsArm64: "hash2", - }, - } - - assert.Equal(t, "myapp", data.PackageName) - assert.Equal(t, "description", data.Description) - assert.Equal(t, "org/repo", data.Repository) - assert.Equal(t, "1.0.0", data.Version) - assert.Equal(t, "MIT", data.License) - assert.Equal(t, "myapp", data.BinaryName) - assert.Equal(t, "hash1", 
data.Checksums.WindowsAmd64) - assert.Equal(t, "hash2", data.Checksums.WindowsArm64) - }) -} diff --git a/pkg/release/publishers/templates/aur/.SRCINFO.tmpl b/pkg/release/publishers/templates/aur/.SRCINFO.tmpl deleted file mode 100644 index af3ad66..0000000 --- a/pkg/release/publishers/templates/aur/.SRCINFO.tmpl +++ /dev/null @@ -1,16 +0,0 @@ -pkgbase = {{.PackageName}}-bin - pkgdesc = {{.Description}} - pkgver = {{.Version}} - pkgrel = 1 - url = https://github.com/{{.Repository}} - arch = x86_64 - arch = aarch64 - license = {{.License}} - provides = {{.PackageName}} - conflicts = {{.PackageName}} - source_x86_64 = {{.PackageName}}-bin-{{.Version}}-x86_64.tar.gz::https://github.com/{{.Repository}}/releases/download/v{{.Version}}/{{.BinaryName}}-linux-amd64.tar.gz - sha256sums_x86_64 = {{.Checksums.LinuxAmd64}} - source_aarch64 = {{.PackageName}}-bin-{{.Version}}-aarch64.tar.gz::https://github.com/{{.Repository}}/releases/download/v{{.Version}}/{{.BinaryName}}-linux-arm64.tar.gz - sha256sums_aarch64 = {{.Checksums.LinuxArm64}} - -pkgname = {{.PackageName}}-bin diff --git a/pkg/release/publishers/templates/aur/PKGBUILD.tmpl b/pkg/release/publishers/templates/aur/PKGBUILD.tmpl deleted file mode 100644 index 61096bf..0000000 --- a/pkg/release/publishers/templates/aur/PKGBUILD.tmpl +++ /dev/null @@ -1,20 +0,0 @@ -# Maintainer: {{.Maintainer}} -pkgname={{.PackageName}}-bin -pkgver={{.Version}} -pkgrel=1 -pkgdesc="{{.Description}}" -arch=('x86_64' 'aarch64') -url="https://github.com/{{.Repository}}" -license=('{{.License}}') -provides=('{{.PackageName}}') -conflicts=('{{.PackageName}}') - -source_x86_64=("${pkgname}-${pkgver}-x86_64.tar.gz::https://github.com/{{.Repository}}/releases/download/v${pkgver}/{{.BinaryName}}-linux-amd64.tar.gz") -source_aarch64=("${pkgname}-${pkgver}-aarch64.tar.gz::https://github.com/{{.Repository}}/releases/download/v${pkgver}/{{.BinaryName}}-linux-arm64.tar.gz") - -sha256sums_x86_64=('{{.Checksums.LinuxAmd64}}') 
-sha256sums_aarch64=('{{.Checksums.LinuxArm64}}') - -package() { - install -Dm755 {{.BinaryName}} "${pkgdir}/usr/bin/{{.BinaryName}}" -} diff --git a/pkg/release/publishers/templates/chocolatey/package.nuspec.tmpl b/pkg/release/publishers/templates/chocolatey/package.nuspec.tmpl deleted file mode 100644 index c96ca7d..0000000 --- a/pkg/release/publishers/templates/chocolatey/package.nuspec.tmpl +++ /dev/null @@ -1,18 +0,0 @@ - - - - {{.PackageName}} - {{.Version}} - {{.Title}} - {{.Authors}} - https://github.com/{{.Repository}} - https://github.com/{{.Repository}}/blob/main/LICENSE - false - {{.Description}} - {{.Tags}} - https://github.com/{{.Repository}}/releases/tag/v{{.Version}} - - - - - diff --git a/pkg/release/publishers/templates/chocolatey/tools/chocolateyinstall.ps1.tmpl b/pkg/release/publishers/templates/chocolatey/tools/chocolateyinstall.ps1.tmpl deleted file mode 100644 index a915be8..0000000 --- a/pkg/release/publishers/templates/chocolatey/tools/chocolateyinstall.ps1.tmpl +++ /dev/null @@ -1,13 +0,0 @@ -$ErrorActionPreference = 'Stop' -$toolsDir = "$(Split-Path -parent $MyInvocation.MyCommand.Definition)" -$url64 = 'https://github.com/{{.Repository}}/releases/download/v{{.Version}}/{{.BinaryName}}-windows-amd64.zip' - -$packageArgs = @{ - packageName = '{{.PackageName}}' - unzipLocation = $toolsDir - url64bit = $url64 - checksum64 = '{{.Checksums.WindowsAmd64}}' - checksumType64 = 'sha256' -} - -Install-ChocolateyZipPackage @packageArgs diff --git a/pkg/release/publishers/templates/homebrew/formula.rb.tmpl b/pkg/release/publishers/templates/homebrew/formula.rb.tmpl deleted file mode 100644 index aa03fcb..0000000 --- a/pkg/release/publishers/templates/homebrew/formula.rb.tmpl +++ /dev/null @@ -1,37 +0,0 @@ -# typed: false -# frozen_string_literal: true - -class {{.FormulaClass}} < Formula - desc "{{.Description}}" - homepage "https://github.com/{{.Repository}}" - version "{{.Version}}" - license "{{.License}}" - - on_macos do - if Hardware::CPU.arm? 
- url "https://github.com/{{.Repository}}/releases/download/v{{.Version}}/{{.BinaryName}}-darwin-arm64.tar.gz" - sha256 "{{.Checksums.DarwinArm64}}" - else - url "https://github.com/{{.Repository}}/releases/download/v{{.Version}}/{{.BinaryName}}-darwin-amd64.tar.gz" - sha256 "{{.Checksums.DarwinAmd64}}" - end - end - - on_linux do - if Hardware::CPU.arm? - url "https://github.com/{{.Repository}}/releases/download/v{{.Version}}/{{.BinaryName}}-linux-arm64.tar.gz" - sha256 "{{.Checksums.LinuxArm64}}" - else - url "https://github.com/{{.Repository}}/releases/download/v{{.Version}}/{{.BinaryName}}-linux-amd64.tar.gz" - sha256 "{{.Checksums.LinuxAmd64}}" - end - end - - def install - bin.install "{{.BinaryName}}" - end - - test do - system "#{bin}/{{.BinaryName}}", "--version" - end -end diff --git a/pkg/release/publishers/templates/npm/install.js.tmpl b/pkg/release/publishers/templates/npm/install.js.tmpl deleted file mode 100644 index bf924f6..0000000 --- a/pkg/release/publishers/templates/npm/install.js.tmpl +++ /dev/null @@ -1,176 +0,0 @@ -#!/usr/bin/env node -/** - * Binary installer for {{.Package}} - * Downloads the correct binary for the current platform from GitHub releases. 
- */ - -const fs = require('fs'); -const path = require('path'); -const https = require('https'); -const { spawnSync } = require('child_process'); -const crypto = require('crypto'); - -const PACKAGE_VERSION = '{{.Version}}'; -const GITHUB_REPO = '{{.Repository}}'; -const BINARY_NAME = '{{.BinaryName}}'; - -// Platform/arch mapping -const PLATFORM_MAP = { - darwin: 'darwin', - linux: 'linux', - win32: 'windows', -}; - -const ARCH_MAP = { - x64: 'amd64', - arm64: 'arm64', -}; - -function getPlatformInfo() { - const platform = PLATFORM_MAP[process.platform]; - const arch = ARCH_MAP[process.arch]; - - if (!platform || !arch) { - console.error(`Unsupported platform: ${process.platform}/${process.arch}`); - process.exit(1); - } - - return { platform, arch }; -} - -function getDownloadUrl(platform, arch) { - const ext = platform === 'windows' ? '.zip' : '.tar.gz'; - const name = `${BINARY_NAME}-${platform}-${arch}${ext}`; - return `https://github.com/${GITHUB_REPO}/releases/download/v${PACKAGE_VERSION}/${name}`; -} - -function getChecksumsUrl() { - return `https://github.com/${GITHUB_REPO}/releases/download/v${PACKAGE_VERSION}/checksums.txt`; -} - -function download(url) { - return new Promise((resolve, reject) => { - const request = (url) => { - https.get(url, (res) => { - if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) { - // Follow redirect - request(res.headers.location); - return; - } - - if (res.statusCode !== 200) { - reject(new Error(`Failed to download ${url}: HTTP ${res.statusCode}`)); - return; - } - - const chunks = []; - res.on('data', (chunk) => chunks.push(chunk)); - res.on('end', () => resolve(Buffer.concat(chunks))); - res.on('error', reject); - }).on('error', reject); - }; - request(url); - }); -} - -async function fetchChecksums() { - try { - const data = await download(getChecksumsUrl()); - const checksums = {}; - data.toString().split('\n').forEach((line) => { - const parts = line.trim().split(/\s+/); - if (parts.length === 
2) { - checksums[parts[1]] = parts[0]; - } - }); - return checksums; - } catch (err) { - console.warn('Warning: Could not fetch checksums, skipping verification'); - return null; - } -} - -function verifyChecksum(data, expectedHash) { - const actualHash = crypto.createHash('sha256').update(data).digest('hex'); - return actualHash === expectedHash; -} - -function extract(data, destDir, platform) { - const tempFile = path.join(destDir, platform === 'windows' ? 'temp.zip' : 'temp.tar.gz'); - fs.writeFileSync(tempFile, data); - - try { - if (platform === 'windows') { - // Use PowerShell to extract zip - const result = spawnSync('powershell', [ - '-command', - `Expand-Archive -Path '${tempFile}' -DestinationPath '${destDir}' -Force` - ], { stdio: 'ignore' }); - if (result.status !== 0) { - throw new Error('Failed to extract zip'); - } - } else { - const result = spawnSync('tar', ['-xzf', tempFile, '-C', destDir], { stdio: 'ignore' }); - if (result.status !== 0) { - throw new Error('Failed to extract tar.gz'); - } - } - } finally { - fs.unlinkSync(tempFile); - } -} - -async function main() { - const { platform, arch } = getPlatformInfo(); - const binDir = path.join(__dirname, 'bin'); - const binaryPath = path.join(binDir, platform === 'windows' ? `${BINARY_NAME}.exe` : BINARY_NAME); - - // Skip if binary already exists - if (fs.existsSync(binaryPath)) { - console.log(`${BINARY_NAME} binary already installed`); - return; - } - - console.log(`Installing ${BINARY_NAME} v${PACKAGE_VERSION} for ${platform}/${arch}...`); - - // Ensure bin directory exists - if (!fs.existsSync(binDir)) { - fs.mkdirSync(binDir, { recursive: true }); - } - - // Fetch checksums - const checksums = await fetchChecksums(); - - // Download binary - const url = getDownloadUrl(platform, arch); - console.log(`Downloading from ${url}`); - - const data = await download(url); - - // Verify checksum if available - if (checksums) { - const ext = platform === 'windows' ? 
'.zip' : '.tar.gz'; - const filename = `${BINARY_NAME}-${platform}-${arch}${ext}`; - const expectedHash = checksums[filename]; - if (expectedHash && !verifyChecksum(data, expectedHash)) { - console.error('Checksum verification failed!'); - process.exit(1); - } - console.log('Checksum verified'); - } - - // Extract - extract(data, binDir, platform); - - // Make executable on Unix - if (platform !== 'windows') { - fs.chmodSync(binaryPath, 0o755); - } - - console.log(`${BINARY_NAME} installed successfully`); -} - -main().catch((err) => { - console.error(`Installation failed: ${err.message}`); - process.exit(1); -}); diff --git a/pkg/release/publishers/templates/npm/package.json.tmpl b/pkg/release/publishers/templates/npm/package.json.tmpl deleted file mode 100644 index a7d0962..0000000 --- a/pkg/release/publishers/templates/npm/package.json.tmpl +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "{{.Package}}", - "version": "{{.Version}}", - "description": "{{.Description}}", - "license": "{{.License}}", - "repository": { - "type": "git", - "url": "https://github.com/{{.Repository}}.git" - }, - "homepage": "https://github.com/{{.Repository}}", - "bugs": { - "url": "https://github.com/{{.Repository}}/issues" - }, - "bin": { - "{{.BinaryName}}": "./bin/run.js" - }, - "scripts": { - "postinstall": "node ./install.js" - }, - "files": [ - "bin/", - "install.js" - ], - "engines": { - "node": ">=14.0.0" - }, - "keywords": [ - "cli", - "{{.ProjectName}}" - ], - "publishConfig": { - "access": "{{.Access}}" - } -} diff --git a/pkg/release/publishers/templates/npm/run.js.tmpl b/pkg/release/publishers/templates/npm/run.js.tmpl deleted file mode 100644 index 8a04a68..0000000 --- a/pkg/release/publishers/templates/npm/run.js.tmpl +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env node -/** - * Binary wrapper for {{.Package}} - * Executes the platform-specific binary. 
- */ - -const { spawn } = require('child_process'); -const path = require('path'); -const fs = require('fs'); - -const BINARY_NAME = '{{.BinaryName}}'; - -function getBinaryPath() { - const binDir = path.join(__dirname); - const isWindows = process.platform === 'win32'; - const binaryName = isWindows ? `${BINARY_NAME}.exe` : BINARY_NAME; - return path.join(binDir, binaryName); -} - -function main() { - const binaryPath = getBinaryPath(); - - if (!fs.existsSync(binaryPath)) { - console.error(`Binary not found at ${binaryPath}`); - console.error('Try reinstalling the package: npm install -g {{.Package}}'); - process.exit(1); - } - - const child = spawn(binaryPath, process.argv.slice(2), { - stdio: 'inherit', - windowsHide: true, - }); - - child.on('error', (err) => { - console.error(`Failed to start ${BINARY_NAME}: ${err.message}`); - process.exit(1); - }); - - child.on('exit', (code, signal) => { - if (signal) { - process.kill(process.pid, signal); - } else { - process.exit(code ?? 0); - } - }); -} - -main(); diff --git a/pkg/release/publishers/templates/scoop/manifest.json.tmpl b/pkg/release/publishers/templates/scoop/manifest.json.tmpl deleted file mode 100644 index 6455225..0000000 --- a/pkg/release/publishers/templates/scoop/manifest.json.tmpl +++ /dev/null @@ -1,30 +0,0 @@ -{ - "version": "{{.Version}}", - "description": "{{.Description}}", - "homepage": "https://github.com/{{.Repository}}", - "license": "{{.License}}", - "architecture": { - "64bit": { - "url": "https://github.com/{{.Repository}}/releases/download/v{{.Version}}/{{.BinaryName}}-windows-amd64.zip", - "hash": "{{.Checksums.WindowsAmd64}}" - }, - "arm64": { - "url": "https://github.com/{{.Repository}}/releases/download/v{{.Version}}/{{.BinaryName}}-windows-arm64.zip", - "hash": "{{.Checksums.WindowsArm64}}" - } - }, - "bin": "{{.BinaryName}}.exe", - "checkver": { - "github": "https://github.com/{{.Repository}}" - }, - "autoupdate": { - "architecture": { - "64bit": { - "url": 
"https://github.com/{{.Repository}}/releases/download/v$version/{{.BinaryName}}-windows-amd64.zip" - }, - "arm64": { - "url": "https://github.com/{{.Repository}}/releases/download/v$version/{{.BinaryName}}-windows-arm64.zip" - } - } - } -} diff --git a/pkg/release/release.go b/pkg/release/release.go deleted file mode 100644 index 699e354..0000000 --- a/pkg/release/release.go +++ /dev/null @@ -1,431 +0,0 @@ -// Package release provides release automation with changelog generation and publishing. -// It orchestrates the build system, changelog generation, and publishing to targets -// like GitHub Releases. -package release - -import ( - "context" - "fmt" - "os" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/build" - "github.com/host-uk/core/pkg/build/builders" - "github.com/host-uk/core/pkg/release/publishers" -) - -// Release represents a release with its version, artifacts, and changelog. -type Release struct { - // Version is the semantic version string (e.g., "v1.2.3"). - Version string - // Artifacts are the built release artifacts (archives with checksums). - Artifacts []build.Artifact - // Changelog is the generated markdown changelog. - Changelog string - // ProjectDir is the root directory of the project. - ProjectDir string -} - -// Publish publishes pre-built artifacts from dist/ to configured targets. -// Use this after `core build` to separate build and publish concerns. -// If dryRun is true, it will show what would be done without actually publishing. -func Publish(ctx context.Context, cfg *Config, dryRun bool) (*Release, error) { - if cfg == nil { - return nil, fmt.Errorf("release.Publish: config is nil") - } - - projectDir := cfg.projectDir - if projectDir == "" { - projectDir = "." 
- } - - // Resolve to absolute path - absProjectDir, err := filepath.Abs(projectDir) - if err != nil { - return nil, fmt.Errorf("release.Publish: failed to resolve project directory: %w", err) - } - - // Step 1: Determine version - version := cfg.version - if version == "" { - version, err = DetermineVersion(absProjectDir) - if err != nil { - return nil, fmt.Errorf("release.Publish: failed to determine version: %w", err) - } - } - - // Step 2: Find pre-built artifacts in dist/ - distDir := filepath.Join(absProjectDir, "dist") - artifacts, err := findArtifacts(distDir) - if err != nil { - return nil, fmt.Errorf("release.Publish: %w", err) - } - - if len(artifacts) == 0 { - return nil, fmt.Errorf("release.Publish: no artifacts found in dist/\nRun 'core build' first to create artifacts") - } - - // Step 3: Generate changelog - changelog, err := Generate(absProjectDir, "", version) - if err != nil { - // Non-fatal: continue with empty changelog - changelog = fmt.Sprintf("Release %s", version) - } - - release := &Release{ - Version: version, - Artifacts: artifacts, - Changelog: changelog, - ProjectDir: absProjectDir, - } - - // Step 4: Publish to configured targets - if len(cfg.Publishers) > 0 { - pubRelease := publishers.NewRelease(release.Version, release.Artifacts, release.Changelog, release.ProjectDir) - - for _, pubCfg := range cfg.Publishers { - publisher, err := getPublisher(pubCfg.Type) - if err != nil { - return release, fmt.Errorf("release.Publish: %w", err) - } - - extendedCfg := buildExtendedConfig(pubCfg) - publisherCfg := publishers.NewPublisherConfig(pubCfg.Type, pubCfg.Prerelease, pubCfg.Draft, extendedCfg) - if err := publisher.Publish(ctx, pubRelease, publisherCfg, cfg, dryRun); err != nil { - return release, fmt.Errorf("release.Publish: publish to %s failed: %w", pubCfg.Type, err) - } - } - } - - return release, nil -} - -// findArtifacts discovers pre-built artifacts in the dist directory. 
-func findArtifacts(distDir string) ([]build.Artifact, error) { - if _, err := os.Stat(distDir); os.IsNotExist(err) { - return nil, fmt.Errorf("dist/ directory not found") - } - - var artifacts []build.Artifact - - entries, err := os.ReadDir(distDir) - if err != nil { - return nil, fmt.Errorf("failed to read dist/: %w", err) - } - - for _, entry := range entries { - if entry.IsDir() { - continue - } - - name := entry.Name() - path := filepath.Join(distDir, name) - - // Include archives and checksums - if strings.HasSuffix(name, ".tar.gz") || - strings.HasSuffix(name, ".zip") || - strings.HasSuffix(name, ".txt") || - strings.HasSuffix(name, ".sig") { - artifacts = append(artifacts, build.Artifact{Path: path}) - } - } - - return artifacts, nil -} - -// Run executes the full release process: determine version, build artifacts, -// generate changelog, and publish to configured targets. -// For separated concerns, prefer using `core build` then `core ci` (Publish). -// If dryRun is true, it will show what would be done without actually publishing. -func Run(ctx context.Context, cfg *Config, dryRun bool) (*Release, error) { - if cfg == nil { - return nil, fmt.Errorf("release.Run: config is nil") - } - - projectDir := cfg.projectDir - if projectDir == "" { - projectDir = "." 
- } - - // Resolve to absolute path - absProjectDir, err := filepath.Abs(projectDir) - if err != nil { - return nil, fmt.Errorf("release.Run: failed to resolve project directory: %w", err) - } - - // Step 1: Determine version - version := cfg.version - if version == "" { - version, err = DetermineVersion(absProjectDir) - if err != nil { - return nil, fmt.Errorf("release.Run: failed to determine version: %w", err) - } - } - - // Step 2: Generate changelog - changelog, err := Generate(absProjectDir, "", version) - if err != nil { - // Non-fatal: continue with empty changelog - changelog = fmt.Sprintf("Release %s", version) - } - - // Step 3: Build artifacts - artifacts, err := buildArtifacts(ctx, cfg, absProjectDir, version) - if err != nil { - return nil, fmt.Errorf("release.Run: build failed: %w", err) - } - - release := &Release{ - Version: version, - Artifacts: artifacts, - Changelog: changelog, - ProjectDir: absProjectDir, - } - - // Step 4: Publish to configured targets - if len(cfg.Publishers) > 0 { - // Convert to publisher types - pubRelease := publishers.NewRelease(release.Version, release.Artifacts, release.Changelog, release.ProjectDir) - - for _, pubCfg := range cfg.Publishers { - publisher, err := getPublisher(pubCfg.Type) - if err != nil { - return release, fmt.Errorf("release.Run: %w", err) - } - - // Build extended config for publisher-specific settings - extendedCfg := buildExtendedConfig(pubCfg) - publisherCfg := publishers.NewPublisherConfig(pubCfg.Type, pubCfg.Prerelease, pubCfg.Draft, extendedCfg) - if err := publisher.Publish(ctx, pubRelease, publisherCfg, cfg, dryRun); err != nil { - return release, fmt.Errorf("release.Run: publish to %s failed: %w", pubCfg.Type, err) - } - } - } - - return release, nil -} - -// buildArtifacts builds all artifacts for the release. 
-func buildArtifacts(ctx context.Context, cfg *Config, projectDir, version string) ([]build.Artifact, error) { - // Load build configuration - buildCfg, err := build.LoadConfig(projectDir) - if err != nil { - return nil, fmt.Errorf("failed to load build config: %w", err) - } - - // Determine targets - var targets []build.Target - if len(cfg.Build.Targets) > 0 { - for _, t := range cfg.Build.Targets { - targets = append(targets, build.Target{OS: t.OS, Arch: t.Arch}) - } - } else if len(buildCfg.Targets) > 0 { - targets = buildCfg.ToTargets() - } else { - // Default targets - targets = []build.Target{ - {OS: "linux", Arch: "amd64"}, - {OS: "linux", Arch: "arm64"}, - {OS: "darwin", Arch: "amd64"}, - {OS: "darwin", Arch: "arm64"}, - {OS: "windows", Arch: "amd64"}, - } - } - - // Determine binary name - binaryName := cfg.Project.Name - if binaryName == "" { - binaryName = buildCfg.Project.Binary - } - if binaryName == "" { - binaryName = buildCfg.Project.Name - } - if binaryName == "" { - binaryName = filepath.Base(projectDir) - } - - // Determine output directory - outputDir := filepath.Join(projectDir, "dist") - - // Get builder (detect project type) - projectType, err := build.PrimaryType(projectDir) - if err != nil { - return nil, fmt.Errorf("failed to detect project type: %w", err) - } - - builder, err := getBuilder(projectType) - if err != nil { - return nil, err - } - - // Build configuration - buildConfig := &build.Config{ - ProjectDir: projectDir, - OutputDir: outputDir, - Name: binaryName, - Version: version, - LDFlags: buildCfg.Build.LDFlags, - } - - // Build - artifacts, err := builder.Build(ctx, buildConfig, targets) - if err != nil { - return nil, fmt.Errorf("build failed: %w", err) - } - - // Archive artifacts - archivedArtifacts, err := build.ArchiveAll(artifacts) - if err != nil { - return nil, fmt.Errorf("archive failed: %w", err) - } - - // Compute checksums - checksummedArtifacts, err := build.ChecksumAll(archivedArtifacts) - if err != nil { - return 
nil, fmt.Errorf("checksum failed: %w", err) - } - - // Write CHECKSUMS.txt - checksumPath := filepath.Join(outputDir, "CHECKSUMS.txt") - if err := build.WriteChecksumFile(checksummedArtifacts, checksumPath); err != nil { - return nil, fmt.Errorf("failed to write checksums file: %w", err) - } - - // Add CHECKSUMS.txt as an artifact - checksumArtifact := build.Artifact{ - Path: checksumPath, - } - checksummedArtifacts = append(checksummedArtifacts, checksumArtifact) - - return checksummedArtifacts, nil -} - -// getBuilder returns the appropriate builder for the project type. -func getBuilder(projectType build.ProjectType) (build.Builder, error) { - switch projectType { - case build.ProjectTypeWails: - return builders.NewWailsBuilder(), nil - case build.ProjectTypeGo: - return builders.NewGoBuilder(), nil - case build.ProjectTypeNode: - return nil, fmt.Errorf("Node.js builder not yet implemented") - case build.ProjectTypePHP: - return nil, fmt.Errorf("PHP builder not yet implemented") - default: - return nil, fmt.Errorf("unsupported project type: %s", projectType) - } -} - -// getPublisher returns the publisher for the given type. -func getPublisher(pubType string) (publishers.Publisher, error) { - switch pubType { - case "github": - return publishers.NewGitHubPublisher(), nil - case "linuxkit": - return publishers.NewLinuxKitPublisher(), nil - case "docker": - return publishers.NewDockerPublisher(), nil - case "npm": - return publishers.NewNpmPublisher(), nil - case "homebrew": - return publishers.NewHomebrewPublisher(), nil - case "scoop": - return publishers.NewScoopPublisher(), nil - case "aur": - return publishers.NewAURPublisher(), nil - case "chocolatey": - return publishers.NewChocolateyPublisher(), nil - default: - return nil, fmt.Errorf("unsupported publisher type: %s", pubType) - } -} - -// buildExtendedConfig builds a map of extended configuration for a publisher. 
-func buildExtendedConfig(pubCfg PublisherConfig) map[string]any { - ext := make(map[string]any) - - // LinuxKit-specific config - if pubCfg.Config != "" { - ext["config"] = pubCfg.Config - } - if len(pubCfg.Formats) > 0 { - ext["formats"] = toAnySlice(pubCfg.Formats) - } - if len(pubCfg.Platforms) > 0 { - ext["platforms"] = toAnySlice(pubCfg.Platforms) - } - - // Docker-specific config - if pubCfg.Registry != "" { - ext["registry"] = pubCfg.Registry - } - if pubCfg.Image != "" { - ext["image"] = pubCfg.Image - } - if pubCfg.Dockerfile != "" { - ext["dockerfile"] = pubCfg.Dockerfile - } - if len(pubCfg.Tags) > 0 { - ext["tags"] = toAnySlice(pubCfg.Tags) - } - if len(pubCfg.BuildArgs) > 0 { - args := make(map[string]any) - for k, v := range pubCfg.BuildArgs { - args[k] = v - } - ext["build_args"] = args - } - - // npm-specific config - if pubCfg.Package != "" { - ext["package"] = pubCfg.Package - } - if pubCfg.Access != "" { - ext["access"] = pubCfg.Access - } - - // Homebrew-specific config - if pubCfg.Tap != "" { - ext["tap"] = pubCfg.Tap - } - if pubCfg.Formula != "" { - ext["formula"] = pubCfg.Formula - } - - // Scoop-specific config - if pubCfg.Bucket != "" { - ext["bucket"] = pubCfg.Bucket - } - - // AUR-specific config - if pubCfg.Maintainer != "" { - ext["maintainer"] = pubCfg.Maintainer - } - - // Chocolatey-specific config - if pubCfg.Push { - ext["push"] = pubCfg.Push - } - - // Official repo config (shared by multiple publishers) - if pubCfg.Official != nil { - official := make(map[string]any) - official["enabled"] = pubCfg.Official.Enabled - if pubCfg.Official.Output != "" { - official["output"] = pubCfg.Official.Output - } - ext["official"] = official - } - - return ext -} - -// toAnySlice converts a string slice to an any slice. 
-func toAnySlice(s []string) []any { - result := make([]any, len(s)) - for i, v := range s { - result[i] = v - } - return result -} diff --git a/pkg/release/release_test.go b/pkg/release/release_test.go deleted file mode 100644 index 98cfe79..0000000 --- a/pkg/release/release_test.go +++ /dev/null @@ -1,700 +0,0 @@ -package release - -import ( - "context" - "os" - "os/exec" - "path/filepath" - "testing" - - "github.com/host-uk/core/pkg/build" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestFindArtifacts_Good(t *testing.T) { - t.Run("finds tar.gz artifacts", func(t *testing.T) { - dir := t.TempDir() - distDir := filepath.Join(dir, "dist") - require.NoError(t, os.MkdirAll(distDir, 0755)) - - // Create test artifact files - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app-linux-amd64.tar.gz"), []byte("test"), 0644)) - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app-darwin-arm64.tar.gz"), []byte("test"), 0644)) - - artifacts, err := findArtifacts(distDir) - require.NoError(t, err) - - assert.Len(t, artifacts, 2) - }) - - t.Run("finds zip artifacts", func(t *testing.T) { - dir := t.TempDir() - distDir := filepath.Join(dir, "dist") - require.NoError(t, os.MkdirAll(distDir, 0755)) - - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app-windows-amd64.zip"), []byte("test"), 0644)) - - artifacts, err := findArtifacts(distDir) - require.NoError(t, err) - - assert.Len(t, artifacts, 1) - assert.Contains(t, artifacts[0].Path, "app-windows-amd64.zip") - }) - - t.Run("finds checksum files", func(t *testing.T) { - dir := t.TempDir() - distDir := filepath.Join(dir, "dist") - require.NoError(t, os.MkdirAll(distDir, 0755)) - - require.NoError(t, os.WriteFile(filepath.Join(distDir, "CHECKSUMS.txt"), []byte("checksums"), 0644)) - - artifacts, err := findArtifacts(distDir) - require.NoError(t, err) - - assert.Len(t, artifacts, 1) - assert.Contains(t, artifacts[0].Path, "CHECKSUMS.txt") - }) - - t.Run("finds 
signature files", func(t *testing.T) { - dir := t.TempDir() - distDir := filepath.Join(dir, "dist") - require.NoError(t, os.MkdirAll(distDir, 0755)) - - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app.tar.gz.sig"), []byte("signature"), 0644)) - - artifacts, err := findArtifacts(distDir) - require.NoError(t, err) - - assert.Len(t, artifacts, 1) - }) - - t.Run("finds mixed artifact types", func(t *testing.T) { - dir := t.TempDir() - distDir := filepath.Join(dir, "dist") - require.NoError(t, os.MkdirAll(distDir, 0755)) - - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app-linux.tar.gz"), []byte("test"), 0644)) - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app-windows.zip"), []byte("test"), 0644)) - require.NoError(t, os.WriteFile(filepath.Join(distDir, "CHECKSUMS.txt"), []byte("checksums"), 0644)) - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app.sig"), []byte("sig"), 0644)) - - artifacts, err := findArtifacts(distDir) - require.NoError(t, err) - - assert.Len(t, artifacts, 4) - }) - - t.Run("ignores non-artifact files", func(t *testing.T) { - dir := t.TempDir() - distDir := filepath.Join(dir, "dist") - require.NoError(t, os.MkdirAll(distDir, 0755)) - - require.NoError(t, os.WriteFile(filepath.Join(distDir, "README.md"), []byte("readme"), 0644)) - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app.exe"), []byte("binary"), 0644)) - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app.tar.gz"), []byte("artifact"), 0644)) - - artifacts, err := findArtifacts(distDir) - require.NoError(t, err) - - assert.Len(t, artifacts, 1) - assert.Contains(t, artifacts[0].Path, "app.tar.gz") - }) - - t.Run("ignores subdirectories", func(t *testing.T) { - dir := t.TempDir() - distDir := filepath.Join(dir, "dist") - require.NoError(t, os.MkdirAll(distDir, 0755)) - require.NoError(t, os.MkdirAll(filepath.Join(distDir, "subdir"), 0755)) - - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app.tar.gz"), []byte("artifact"), 
0644)) - require.NoError(t, os.WriteFile(filepath.Join(distDir, "subdir", "nested.tar.gz"), []byte("nested"), 0644)) - - artifacts, err := findArtifacts(distDir) - require.NoError(t, err) - - // Should only find the top-level artifact - assert.Len(t, artifacts, 1) - }) - - t.Run("returns empty slice for empty dist directory", func(t *testing.T) { - dir := t.TempDir() - distDir := filepath.Join(dir, "dist") - require.NoError(t, os.MkdirAll(distDir, 0755)) - - artifacts, err := findArtifacts(distDir) - require.NoError(t, err) - - assert.Empty(t, artifacts) - }) -} - -func TestFindArtifacts_Bad(t *testing.T) { - t.Run("returns error when dist directory does not exist", func(t *testing.T) { - dir := t.TempDir() - distDir := filepath.Join(dir, "dist") - - _, err := findArtifacts(distDir) - assert.Error(t, err) - assert.Contains(t, err.Error(), "dist/ directory not found") - }) - - t.Run("returns error when dist directory is unreadable", func(t *testing.T) { - dir := t.TempDir() - distDir := filepath.Join(dir, "dist") - require.NoError(t, os.MkdirAll(distDir, 0755)) - - // Create a file that looks like dist but will cause ReadDir to fail - // by making the directory unreadable - require.NoError(t, os.Chmod(distDir, 0000)) - defer func() { _ = os.Chmod(distDir, 0755) }() - - _, err := findArtifacts(distDir) - assert.Error(t, err) - assert.Contains(t, err.Error(), "failed to read dist/") - }) -} - -func TestGetBuilder_Good(t *testing.T) { - t.Run("returns Go builder for go project type", func(t *testing.T) { - builder, err := getBuilder(build.ProjectTypeGo) - require.NoError(t, err) - assert.NotNil(t, builder) - assert.Equal(t, "go", builder.Name()) - }) - - t.Run("returns Wails builder for wails project type", func(t *testing.T) { - builder, err := getBuilder(build.ProjectTypeWails) - require.NoError(t, err) - assert.NotNil(t, builder) - assert.Equal(t, "wails", builder.Name()) - }) -} - -func TestGetBuilder_Bad(t *testing.T) { - t.Run("returns error for Node project 
type", func(t *testing.T) { - _, err := getBuilder(build.ProjectTypeNode) - assert.Error(t, err) - assert.Contains(t, err.Error(), "Node.js builder not yet implemented") - }) - - t.Run("returns error for PHP project type", func(t *testing.T) { - _, err := getBuilder(build.ProjectTypePHP) - assert.Error(t, err) - assert.Contains(t, err.Error(), "PHP builder not yet implemented") - }) - - t.Run("returns error for unsupported project type", func(t *testing.T) { - _, err := getBuilder(build.ProjectType("unknown")) - assert.Error(t, err) - assert.Contains(t, err.Error(), "unsupported project type") - }) -} - -func TestGetPublisher_Good(t *testing.T) { - tests := []struct { - pubType string - expectedName string - }{ - {"github", "github"}, - {"linuxkit", "linuxkit"}, - {"docker", "docker"}, - {"npm", "npm"}, - {"homebrew", "homebrew"}, - {"scoop", "scoop"}, - {"aur", "aur"}, - {"chocolatey", "chocolatey"}, - } - - for _, tc := range tests { - t.Run(tc.pubType, func(t *testing.T) { - publisher, err := getPublisher(tc.pubType) - require.NoError(t, err) - assert.NotNil(t, publisher) - assert.Equal(t, tc.expectedName, publisher.Name()) - }) - } -} - -func TestGetPublisher_Bad(t *testing.T) { - t.Run("returns error for unsupported publisher type", func(t *testing.T) { - _, err := getPublisher("unsupported") - assert.Error(t, err) - assert.Contains(t, err.Error(), "unsupported publisher type: unsupported") - }) - - t.Run("returns error for empty publisher type", func(t *testing.T) { - _, err := getPublisher("") - assert.Error(t, err) - assert.Contains(t, err.Error(), "unsupported publisher type") - }) -} - -func TestBuildExtendedConfig_Good(t *testing.T) { - t.Run("returns empty map for minimal config", func(t *testing.T) { - cfg := PublisherConfig{ - Type: "github", - } - - ext := buildExtendedConfig(cfg) - assert.Empty(t, ext) - }) - - t.Run("includes LinuxKit config", func(t *testing.T) { - cfg := PublisherConfig{ - Type: "linuxkit", - Config: "linuxkit.yaml", - Formats: 
[]string{"iso", "qcow2"}, - Platforms: []string{"linux/amd64", "linux/arm64"}, - } - - ext := buildExtendedConfig(cfg) - - assert.Equal(t, "linuxkit.yaml", ext["config"]) - assert.Equal(t, []any{"iso", "qcow2"}, ext["formats"]) - assert.Equal(t, []any{"linux/amd64", "linux/arm64"}, ext["platforms"]) - }) - - t.Run("includes Docker config", func(t *testing.T) { - cfg := PublisherConfig{ - Type: "docker", - Registry: "ghcr.io", - Image: "owner/repo", - Dockerfile: "Dockerfile.prod", - Tags: []string{"latest", "v1.0.0"}, - BuildArgs: map[string]string{"VERSION": "1.0.0"}, - } - - ext := buildExtendedConfig(cfg) - - assert.Equal(t, "ghcr.io", ext["registry"]) - assert.Equal(t, "owner/repo", ext["image"]) - assert.Equal(t, "Dockerfile.prod", ext["dockerfile"]) - assert.Equal(t, []any{"latest", "v1.0.0"}, ext["tags"]) - buildArgs := ext["build_args"].(map[string]any) - assert.Equal(t, "1.0.0", buildArgs["VERSION"]) - }) - - t.Run("includes npm config", func(t *testing.T) { - cfg := PublisherConfig{ - Type: "npm", - Package: "@host-uk/core", - Access: "public", - } - - ext := buildExtendedConfig(cfg) - - assert.Equal(t, "@host-uk/core", ext["package"]) - assert.Equal(t, "public", ext["access"]) - }) - - t.Run("includes Homebrew config", func(t *testing.T) { - cfg := PublisherConfig{ - Type: "homebrew", - Tap: "host-uk/tap", - Formula: "core", - } - - ext := buildExtendedConfig(cfg) - - assert.Equal(t, "host-uk/tap", ext["tap"]) - assert.Equal(t, "core", ext["formula"]) - }) - - t.Run("includes Scoop config", func(t *testing.T) { - cfg := PublisherConfig{ - Type: "scoop", - Bucket: "host-uk/bucket", - } - - ext := buildExtendedConfig(cfg) - - assert.Equal(t, "host-uk/bucket", ext["bucket"]) - }) - - t.Run("includes AUR config", func(t *testing.T) { - cfg := PublisherConfig{ - Type: "aur", - Maintainer: "John Doe ", - } - - ext := buildExtendedConfig(cfg) - - assert.Equal(t, "John Doe ", ext["maintainer"]) - }) - - t.Run("includes Chocolatey config", func(t *testing.T) { - 
cfg := PublisherConfig{ - Type: "chocolatey", - Push: true, - } - - ext := buildExtendedConfig(cfg) - - assert.True(t, ext["push"].(bool)) - }) - - t.Run("includes Official config", func(t *testing.T) { - cfg := PublisherConfig{ - Type: "homebrew", - Official: &OfficialConfig{ - Enabled: true, - Output: "/path/to/output", - }, - } - - ext := buildExtendedConfig(cfg) - - official := ext["official"].(map[string]any) - assert.True(t, official["enabled"].(bool)) - assert.Equal(t, "/path/to/output", official["output"]) - }) - - t.Run("Official config without output", func(t *testing.T) { - cfg := PublisherConfig{ - Type: "scoop", - Official: &OfficialConfig{ - Enabled: true, - }, - } - - ext := buildExtendedConfig(cfg) - - official := ext["official"].(map[string]any) - assert.True(t, official["enabled"].(bool)) - _, hasOutput := official["output"] - assert.False(t, hasOutput) - }) -} - -func TestToAnySlice_Good(t *testing.T) { - t.Run("converts string slice to any slice", func(t *testing.T) { - input := []string{"a", "b", "c"} - - result := toAnySlice(input) - - assert.Len(t, result, 3) - assert.Equal(t, "a", result[0]) - assert.Equal(t, "b", result[1]) - assert.Equal(t, "c", result[2]) - }) - - t.Run("handles empty slice", func(t *testing.T) { - input := []string{} - - result := toAnySlice(input) - - assert.Empty(t, result) - }) - - t.Run("handles single element", func(t *testing.T) { - input := []string{"only"} - - result := toAnySlice(input) - - assert.Len(t, result, 1) - assert.Equal(t, "only", result[0]) - }) -} - -func TestPublish_Good(t *testing.T) { - t.Run("returns release with version from config", func(t *testing.T) { - dir := t.TempDir() - distDir := filepath.Join(dir, "dist") - require.NoError(t, os.MkdirAll(distDir, 0755)) - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app.tar.gz"), []byte("test"), 0644)) - - cfg := DefaultConfig() - cfg.SetProjectDir(dir) - cfg.SetVersion("v1.0.0") - cfg.Publishers = nil // No publishers to avoid network calls 
- - release, err := Publish(context.Background(), cfg, true) - require.NoError(t, err) - - assert.Equal(t, "v1.0.0", release.Version) - assert.Len(t, release.Artifacts, 1) - }) - - t.Run("finds artifacts in dist directory", func(t *testing.T) { - dir := t.TempDir() - distDir := filepath.Join(dir, "dist") - require.NoError(t, os.MkdirAll(distDir, 0755)) - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app-linux.tar.gz"), []byte("test"), 0644)) - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app-darwin.tar.gz"), []byte("test"), 0644)) - require.NoError(t, os.WriteFile(filepath.Join(distDir, "CHECKSUMS.txt"), []byte("checksums"), 0644)) - - cfg := DefaultConfig() - cfg.SetProjectDir(dir) - cfg.SetVersion("v1.0.0") - cfg.Publishers = nil - - release, err := Publish(context.Background(), cfg, true) - require.NoError(t, err) - - assert.Len(t, release.Artifacts, 3) - }) -} - -func TestPublish_Bad(t *testing.T) { - t.Run("returns error when config is nil", func(t *testing.T) { - _, err := Publish(context.Background(), nil, true) - assert.Error(t, err) - assert.Contains(t, err.Error(), "config is nil") - }) - - t.Run("returns error when dist directory missing", func(t *testing.T) { - dir := t.TempDir() - - cfg := DefaultConfig() - cfg.SetProjectDir(dir) - cfg.SetVersion("v1.0.0") - - _, err := Publish(context.Background(), cfg, true) - assert.Error(t, err) - assert.Contains(t, err.Error(), "dist/ directory not found") - }) - - t.Run("returns error when no artifacts found", func(t *testing.T) { - dir := t.TempDir() - distDir := filepath.Join(dir, "dist") - require.NoError(t, os.MkdirAll(distDir, 0755)) - - cfg := DefaultConfig() - cfg.SetProjectDir(dir) - cfg.SetVersion("v1.0.0") - - _, err := Publish(context.Background(), cfg, true) - assert.Error(t, err) - assert.Contains(t, err.Error(), "no artifacts found") - }) - - t.Run("returns error for unsupported publisher", func(t *testing.T) { - dir := t.TempDir() - distDir := filepath.Join(dir, "dist") - 
require.NoError(t, os.MkdirAll(distDir, 0755)) - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app.tar.gz"), []byte("test"), 0644)) - - cfg := DefaultConfig() - cfg.SetProjectDir(dir) - cfg.SetVersion("v1.0.0") - cfg.Publishers = []PublisherConfig{ - {Type: "unsupported"}, - } - - _, err := Publish(context.Background(), cfg, true) - assert.Error(t, err) - assert.Contains(t, err.Error(), "unsupported publisher type") - }) - - t.Run("returns error when version determination fails in non-git dir", func(t *testing.T) { - dir := t.TempDir() - distDir := filepath.Join(dir, "dist") - require.NoError(t, os.MkdirAll(distDir, 0755)) - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app.tar.gz"), []byte("test"), 0644)) - - cfg := DefaultConfig() - cfg.SetProjectDir(dir) - // Don't set version - let it try to determine from git - cfg.Publishers = nil - - // In a non-git directory, DetermineVersion returns v0.0.1 as default - // so we verify that the publish proceeds without error - release, err := Publish(context.Background(), cfg, true) - require.NoError(t, err) - assert.Equal(t, "v0.0.1", release.Version) - }) -} - -func TestRun_Good(t *testing.T) { - t.Run("returns release with version from config", func(t *testing.T) { - // Create a minimal Go project for testing - dir := t.TempDir() - - // Create go.mod - goMod := `module testapp - -go 1.21 -` - require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte(goMod), 0644)) - - // Create main.go - mainGo := `package main - -func main() {} -` - require.NoError(t, os.WriteFile(filepath.Join(dir, "main.go"), []byte(mainGo), 0644)) - - cfg := DefaultConfig() - cfg.SetProjectDir(dir) - cfg.SetVersion("v1.0.0") - cfg.Project.Name = "testapp" - cfg.Build.Targets = []TargetConfig{} // Empty targets to use defaults - cfg.Publishers = nil // No publishers to avoid network calls - - // Note: This test will actually try to build, which may fail in CI - // So we just test that the function accepts the config 
properly - release, err := Run(context.Background(), cfg, true) - if err != nil { - // Build might fail in test environment, but we still verify the error message - assert.Contains(t, err.Error(), "build") - } else { - assert.Equal(t, "v1.0.0", release.Version) - } - }) -} - -func TestRun_Bad(t *testing.T) { - t.Run("returns error when config is nil", func(t *testing.T) { - _, err := Run(context.Background(), nil, true) - assert.Error(t, err) - assert.Contains(t, err.Error(), "config is nil") - }) -} - -func TestRelease_Structure(t *testing.T) { - t.Run("Release struct holds expected fields", func(t *testing.T) { - release := &Release{ - Version: "v1.0.0", - Artifacts: []build.Artifact{{Path: "/path/to/artifact"}}, - Changelog: "## v1.0.0\n\nChanges", - ProjectDir: "/project", - } - - assert.Equal(t, "v1.0.0", release.Version) - assert.Len(t, release.Artifacts, 1) - assert.Contains(t, release.Changelog, "v1.0.0") - assert.Equal(t, "/project", release.ProjectDir) - }) -} - -func TestPublish_VersionFromGit(t *testing.T) { - t.Run("determines version from git when not set", func(t *testing.T) { - dir := setupPublishGitRepo(t) - createPublishCommit(t, dir, "feat: initial commit") - createPublishTag(t, dir, "v1.2.3") - - // Create dist directory with artifact - distDir := filepath.Join(dir, "dist") - require.NoError(t, os.MkdirAll(distDir, 0755)) - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app.tar.gz"), []byte("test"), 0644)) - - cfg := DefaultConfig() - cfg.SetProjectDir(dir) - // Don't set version - let it be determined from git - cfg.Publishers = nil - - release, err := Publish(context.Background(), cfg, true) - require.NoError(t, err) - - assert.Equal(t, "v1.2.3", release.Version) - }) -} - -func TestPublish_ChangelogGeneration(t *testing.T) { - t.Run("generates changelog from git commits when available", func(t *testing.T) { - dir := setupPublishGitRepo(t) - createPublishCommit(t, dir, "feat: add feature") - createPublishTag(t, dir, "v1.0.0") - 
createPublishCommit(t, dir, "fix: fix bug") - createPublishTag(t, dir, "v1.0.1") - - // Create dist directory with artifact - distDir := filepath.Join(dir, "dist") - require.NoError(t, os.MkdirAll(distDir, 0755)) - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app.tar.gz"), []byte("test"), 0644)) - - cfg := DefaultConfig() - cfg.SetProjectDir(dir) - cfg.SetVersion("v1.0.1") - cfg.Publishers = nil - - release, err := Publish(context.Background(), cfg, true) - require.NoError(t, err) - - // Changelog should contain either the commit message or the version - assert.Contains(t, release.Changelog, "v1.0.1") - }) - - t.Run("uses fallback changelog on error", func(t *testing.T) { - dir := t.TempDir() // Not a git repo - distDir := filepath.Join(dir, "dist") - require.NoError(t, os.MkdirAll(distDir, 0755)) - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app.tar.gz"), []byte("test"), 0644)) - - cfg := DefaultConfig() - cfg.SetProjectDir(dir) - cfg.SetVersion("v1.0.0") - cfg.Publishers = nil - - release, err := Publish(context.Background(), cfg, true) - require.NoError(t, err) - - // Should use fallback changelog - assert.Contains(t, release.Changelog, "Release v1.0.0") - }) -} - -func TestPublish_DefaultProjectDir(t *testing.T) { - t.Run("uses current directory when projectDir is empty", func(t *testing.T) { - // Create artifacts in current directory's dist folder - dir := t.TempDir() - distDir := filepath.Join(dir, "dist") - require.NoError(t, os.MkdirAll(distDir, 0755)) - require.NoError(t, os.WriteFile(filepath.Join(distDir, "app.tar.gz"), []byte("test"), 0644)) - - cfg := DefaultConfig() - cfg.SetProjectDir(dir) - cfg.SetVersion("v1.0.0") - cfg.Publishers = nil - - release, err := Publish(context.Background(), cfg, true) - require.NoError(t, err) - - assert.NotEmpty(t, release.ProjectDir) - }) -} - -// Helper functions for publish tests -func setupPublishGitRepo(t *testing.T) string { - t.Helper() - dir := t.TempDir() - - cmd := exec.Command("git", 
"init") - cmd.Dir = dir - require.NoError(t, cmd.Run()) - - cmd = exec.Command("git", "config", "user.email", "test@example.com") - cmd.Dir = dir - require.NoError(t, cmd.Run()) - - cmd = exec.Command("git", "config", "user.name", "Test User") - cmd.Dir = dir - require.NoError(t, cmd.Run()) - - return dir -} - -func createPublishCommit(t *testing.T, dir, message string) { - t.Helper() - - filePath := filepath.Join(dir, "publish_test.txt") - content, _ := os.ReadFile(filePath) - content = append(content, []byte(message+"\n")...) - require.NoError(t, os.WriteFile(filePath, content, 0644)) - - cmd := exec.Command("git", "add", ".") - cmd.Dir = dir - require.NoError(t, cmd.Run()) - - cmd = exec.Command("git", "commit", "-m", message) - cmd.Dir = dir - require.NoError(t, cmd.Run()) -} - -func createPublishTag(t *testing.T, dir, tag string) { - t.Helper() - cmd := exec.Command("git", "tag", tag) - cmd.Dir = dir - require.NoError(t, cmd.Run()) -} diff --git a/pkg/release/sdk.go b/pkg/release/sdk.go deleted file mode 100644 index 420e02f..0000000 --- a/pkg/release/sdk.go +++ /dev/null @@ -1,133 +0,0 @@ -// Package release provides release automation with changelog generation and publishing. -package release - -import ( - "context" - "fmt" - - "github.com/host-uk/core/pkg/sdk" -) - -// SDKRelease holds the result of an SDK release. -type SDKRelease struct { - // Version is the SDK version. - Version string - // Languages that were generated. - Languages []string - // Output directory. - Output string -} - -// RunSDK executes SDK-only release: diff check + generate. -// If dryRun is true, it shows what would be done without generating. 
-func RunSDK(ctx context.Context, cfg *Config, dryRun bool) (*SDKRelease, error) { - if cfg == nil { - return nil, fmt.Errorf("release.RunSDK: config is nil") - } - if cfg.SDK == nil { - return nil, fmt.Errorf("release.RunSDK: sdk not configured in .core/release.yaml") - } - - projectDir := cfg.projectDir - if projectDir == "" { - projectDir = "." - } - - // Determine version - version := cfg.version - if version == "" { - var err error - version, err = DetermineVersion(projectDir) - if err != nil { - return nil, fmt.Errorf("release.RunSDK: failed to determine version: %w", err) - } - } - - // Run diff check if enabled - if cfg.SDK.Diff.Enabled { - breaking, err := checkBreakingChanges(projectDir, cfg.SDK) - if err != nil { - // Non-fatal: warn and continue - fmt.Printf("Warning: diff check failed: %v\n", err) - } else if breaking { - if cfg.SDK.Diff.FailOnBreaking { - return nil, fmt.Errorf("release.RunSDK: breaking API changes detected") - } - fmt.Printf("Warning: breaking API changes detected\n") - } - } - - // Prepare result - output := cfg.SDK.Output - if output == "" { - output = "sdk" - } - - result := &SDKRelease{ - Version: version, - Languages: cfg.SDK.Languages, - Output: output, - } - - if dryRun { - return result, nil - } - - // Generate SDKs - sdkCfg := toSDKConfig(cfg.SDK) - s := sdk.New(projectDir, sdkCfg) - s.SetVersion(version) - - if err := s.Generate(ctx); err != nil { - return nil, fmt.Errorf("release.RunSDK: generation failed: %w", err) - } - - return result, nil -} - -// checkBreakingChanges runs oasdiff to detect breaking changes. 
-func checkBreakingChanges(projectDir string, cfg *SDKConfig) (bool, error) { - // Get previous tag for comparison (uses getPreviousTag from changelog.go) - prevTag, err := getPreviousTag(projectDir, "HEAD") - if err != nil { - return false, fmt.Errorf("no previous tag found: %w", err) - } - - // Detect spec path - specPath := cfg.Spec - if specPath == "" { - s := sdk.New(projectDir, nil) - specPath, err = s.DetectSpec() - if err != nil { - return false, err - } - } - - // Run diff - result, err := sdk.Diff(prevTag, specPath) - if err != nil { - return false, err - } - - return result.Breaking, nil -} - -// toSDKConfig converts release.SDKConfig to sdk.Config. -func toSDKConfig(cfg *SDKConfig) *sdk.Config { - if cfg == nil { - return nil - } - return &sdk.Config{ - Spec: cfg.Spec, - Languages: cfg.Languages, - Output: cfg.Output, - Package: sdk.PackageConfig{ - Name: cfg.Package.Name, - Version: cfg.Package.Version, - }, - Diff: sdk.DiffConfig{ - Enabled: cfg.Diff.Enabled, - FailOnBreaking: cfg.Diff.FailOnBreaking, - }, - } -} diff --git a/pkg/release/sdk_test.go b/pkg/release/sdk_test.go deleted file mode 100644 index f800beb..0000000 --- a/pkg/release/sdk_test.go +++ /dev/null @@ -1,229 +0,0 @@ -package release - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestRunSDK_Bad_NilConfig(t *testing.T) { - _, err := RunSDK(context.Background(), nil, true) - assert.Error(t, err) - assert.Contains(t, err.Error(), "config is nil") -} - -func TestRunSDK_Bad_NoSDKConfig(t *testing.T) { - cfg := &Config{ - SDK: nil, - } - cfg.projectDir = "/tmp" - - _, err := RunSDK(context.Background(), cfg, true) - assert.Error(t, err) - assert.Contains(t, err.Error(), "sdk not configured") -} - -func TestRunSDK_Good_DryRun(t *testing.T) { - cfg := &Config{ - SDK: &SDKConfig{ - Languages: []string{"typescript", "python"}, - Output: "sdk", - }, - } - cfg.projectDir = "/tmp" - cfg.version = "v1.0.0" - - result, 
err := RunSDK(context.Background(), cfg, true) - require.NoError(t, err) - - assert.Equal(t, "v1.0.0", result.Version) - assert.Len(t, result.Languages, 2) - assert.Contains(t, result.Languages, "typescript") - assert.Contains(t, result.Languages, "python") - assert.Equal(t, "sdk", result.Output) -} - -func TestRunSDK_Good_DryRunDefaultOutput(t *testing.T) { - cfg := &Config{ - SDK: &SDKConfig{ - Languages: []string{"go"}, - Output: "", // Empty output, should default to "sdk" - }, - } - cfg.projectDir = "/tmp" - cfg.version = "v2.0.0" - - result, err := RunSDK(context.Background(), cfg, true) - require.NoError(t, err) - - assert.Equal(t, "sdk", result.Output) -} - -func TestRunSDK_Good_DryRunDefaultProjectDir(t *testing.T) { - cfg := &Config{ - SDK: &SDKConfig{ - Languages: []string{"typescript"}, - Output: "out", - }, - } - // projectDir is empty, should default to "." - cfg.version = "v1.0.0" - - result, err := RunSDK(context.Background(), cfg, true) - require.NoError(t, err) - - assert.Equal(t, "v1.0.0", result.Version) -} - -func TestRunSDK_Bad_BreakingChangesFailOnBreaking(t *testing.T) { - // This test verifies that when diff.FailOnBreaking is true and breaking changes - // are detected, RunSDK returns an error. However, since we can't easily mock - // the diff check, this test verifies the config is correctly processed. - // The actual breaking change detection is tested in pkg/sdk/diff_test.go. 
- cfg := &Config{ - SDK: &SDKConfig{ - Languages: []string{"typescript"}, - Output: "sdk", - Diff: SDKDiffConfig{ - Enabled: true, - FailOnBreaking: true, - }, - }, - } - cfg.projectDir = "/tmp" - cfg.version = "v1.0.0" - - // In dry run mode with no git repo, diff check will fail gracefully - // (non-fatal warning), so this should succeed - result, err := RunSDK(context.Background(), cfg, true) - require.NoError(t, err) - assert.Equal(t, "v1.0.0", result.Version) -} - -func TestToSDKConfig_Good(t *testing.T) { - sdkCfg := &SDKConfig{ - Spec: "api/openapi.yaml", - Languages: []string{"typescript", "go"}, - Output: "sdk", - Package: SDKPackageConfig{ - Name: "myapi", - Version: "v1.0.0", - }, - Diff: SDKDiffConfig{ - Enabled: true, - FailOnBreaking: true, - }, - } - - result := toSDKConfig(sdkCfg) - - assert.Equal(t, "api/openapi.yaml", result.Spec) - assert.Equal(t, []string{"typescript", "go"}, result.Languages) - assert.Equal(t, "sdk", result.Output) - assert.Equal(t, "myapi", result.Package.Name) - assert.Equal(t, "v1.0.0", result.Package.Version) - assert.True(t, result.Diff.Enabled) - assert.True(t, result.Diff.FailOnBreaking) -} - -func TestToSDKConfig_Good_NilInput(t *testing.T) { - result := toSDKConfig(nil) - assert.Nil(t, result) -} - -func TestRunSDK_Good_WithDiffEnabledNoFailOnBreaking(t *testing.T) { - // Tests diff enabled but FailOnBreaking=false (should warn but not fail) - cfg := &Config{ - SDK: &SDKConfig{ - Languages: []string{"typescript"}, - Output: "sdk", - Diff: SDKDiffConfig{ - Enabled: true, - FailOnBreaking: false, - }, - }, - } - cfg.projectDir = "/tmp" - cfg.version = "v1.0.0" - - // Dry run should succeed even without git repo (diff check fails gracefully) - result, err := RunSDK(context.Background(), cfg, true) - require.NoError(t, err) - assert.Equal(t, "v1.0.0", result.Version) - assert.Contains(t, result.Languages, "typescript") -} - -func TestRunSDK_Good_MultipleLanguages(t *testing.T) { - // Tests multiple language support - cfg 
:= &Config{ - SDK: &SDKConfig{ - Languages: []string{"typescript", "python", "go", "java"}, - Output: "multi-sdk", - }, - } - cfg.projectDir = "/tmp" - cfg.version = "v3.0.0" - - result, err := RunSDK(context.Background(), cfg, true) - require.NoError(t, err) - - assert.Equal(t, "v3.0.0", result.Version) - assert.Len(t, result.Languages, 4) - assert.Equal(t, "multi-sdk", result.Output) -} - -func TestRunSDK_Good_WithPackageConfig(t *testing.T) { - // Tests that package config is properly handled - cfg := &Config{ - SDK: &SDKConfig{ - Spec: "openapi.yaml", - Languages: []string{"typescript"}, - Output: "sdk", - Package: SDKPackageConfig{ - Name: "my-custom-sdk", - Version: "v2.5.0", - }, - }, - } - cfg.projectDir = "/tmp" - cfg.version = "v1.0.0" - - result, err := RunSDK(context.Background(), cfg, true) - require.NoError(t, err) - assert.Equal(t, "v1.0.0", result.Version) -} - -func TestToSDKConfig_Good_EmptyPackageConfig(t *testing.T) { - // Tests conversion with empty package config - sdkCfg := &SDKConfig{ - Languages: []string{"go"}, - Output: "sdk", - // Package is empty struct - } - - result := toSDKConfig(sdkCfg) - - assert.Equal(t, []string{"go"}, result.Languages) - assert.Equal(t, "sdk", result.Output) - assert.Empty(t, result.Package.Name) - assert.Empty(t, result.Package.Version) -} - -func TestToSDKConfig_Good_DiffDisabled(t *testing.T) { - // Tests conversion with diff disabled - sdkCfg := &SDKConfig{ - Languages: []string{"typescript"}, - Output: "sdk", - Diff: SDKDiffConfig{ - Enabled: false, - FailOnBreaking: false, - }, - } - - result := toSDKConfig(sdkCfg) - - assert.False(t, result.Diff.Enabled) - assert.False(t, result.Diff.FailOnBreaking) -} diff --git a/pkg/release/testdata/.core/release.yaml b/pkg/release/testdata/.core/release.yaml deleted file mode 100644 index b9c9fd7..0000000 --- a/pkg/release/testdata/.core/release.yaml +++ /dev/null @@ -1,35 +0,0 @@ -version: 1 - -project: - name: myapp - repository: owner/repo - -build: - targets: - - 
os: linux - arch: amd64 - - os: linux - arch: arm64 - - os: darwin - arch: amd64 - - os: darwin - arch: arm64 - - os: windows - arch: amd64 - -publishers: - - type: github - prerelease: false - draft: false - -changelog: - include: - - feat - - fix - - perf - exclude: - - chore - - docs - - style - - test - - ci diff --git a/pkg/release/version.go b/pkg/release/version.go deleted file mode 100644 index 335ced7..0000000 --- a/pkg/release/version.go +++ /dev/null @@ -1,195 +0,0 @@ -// Package release provides release automation with changelog generation and publishing. -package release - -import ( - "fmt" - "os/exec" - "regexp" - "strconv" - "strings" -) - -// semverRegex matches semantic version strings with or without 'v' prefix. -var semverRegex = regexp.MustCompile(`^v?(\d+)\.(\d+)\.(\d+)(?:-([a-zA-Z0-9.-]+))?(?:\+([a-zA-Z0-9.-]+))?$`) - -// DetermineVersion determines the version for a release. -// It checks in order: -// 1. Git tag on HEAD -// 2. Most recent tag + increment patch -// 3. Default to v0.0.1 if no tags exist -func DetermineVersion(dir string) (string, error) { - // Check if HEAD has a tag - headTag, err := getTagOnHead(dir) - if err == nil && headTag != "" { - return normalizeVersion(headTag), nil - } - - // Get most recent tag - latestTag, err := getLatestTag(dir) - if err != nil || latestTag == "" { - // No tags exist, return default - return "v0.0.1", nil - } - - // Increment patch version - return IncrementVersion(latestTag), nil -} - -// IncrementVersion increments the patch version of a semver string. 
-// Examples: -// - "v1.2.3" -> "v1.2.4" -// - "1.2.3" -> "v1.2.4" -// - "v1.2.3-alpha" -> "v1.2.4" (strips prerelease) -func IncrementVersion(current string) string { - matches := semverRegex.FindStringSubmatch(current) - if matches == nil { - // Not a valid semver, return as-is with increment suffix - return current + ".1" - } - - major, _ := strconv.Atoi(matches[1]) - minor, _ := strconv.Atoi(matches[2]) - patch, _ := strconv.Atoi(matches[3]) - - // Increment patch - patch++ - - return fmt.Sprintf("v%d.%d.%d", major, minor, patch) -} - -// IncrementMinor increments the minor version of a semver string. -// Examples: -// - "v1.2.3" -> "v1.3.0" -// - "1.2.3" -> "v1.3.0" -func IncrementMinor(current string) string { - matches := semverRegex.FindStringSubmatch(current) - if matches == nil { - return current + ".1" - } - - major, _ := strconv.Atoi(matches[1]) - minor, _ := strconv.Atoi(matches[2]) - - // Increment minor, reset patch - minor++ - - return fmt.Sprintf("v%d.%d.0", major, minor) -} - -// IncrementMajor increments the major version of a semver string. -// Examples: -// - "v1.2.3" -> "v2.0.0" -// - "1.2.3" -> "v2.0.0" -func IncrementMajor(current string) string { - matches := semverRegex.FindStringSubmatch(current) - if matches == nil { - return current + ".1" - } - - major, _ := strconv.Atoi(matches[1]) - - // Increment major, reset minor and patch - major++ - - return fmt.Sprintf("v%d.0.0", major) -} - -// ParseVersion parses a semver string into its components. -// Returns (major, minor, patch, prerelease, build, error). 
-func ParseVersion(version string) (int, int, int, string, string, error) { - matches := semverRegex.FindStringSubmatch(version) - if matches == nil { - return 0, 0, 0, "", "", fmt.Errorf("invalid semver: %s", version) - } - - major, _ := strconv.Atoi(matches[1]) - minor, _ := strconv.Atoi(matches[2]) - patch, _ := strconv.Atoi(matches[3]) - prerelease := matches[4] - build := matches[5] - - return major, minor, patch, prerelease, build, nil -} - -// ValidateVersion checks if a string is a valid semver. -func ValidateVersion(version string) bool { - return semverRegex.MatchString(version) -} - -// normalizeVersion ensures the version starts with 'v'. -func normalizeVersion(version string) string { - if !strings.HasPrefix(version, "v") { - return "v" + version - } - return version -} - -// getTagOnHead returns the tag on HEAD, if any. -func getTagOnHead(dir string) (string, error) { - cmd := exec.Command("git", "describe", "--tags", "--exact-match", "HEAD") - cmd.Dir = dir - output, err := cmd.Output() - if err != nil { - return "", err - } - return strings.TrimSpace(string(output)), nil -} - -// getLatestTag returns the most recent tag in the repository. -func getLatestTag(dir string) (string, error) { - cmd := exec.Command("git", "describe", "--tags", "--abbrev=0") - cmd.Dir = dir - output, err := cmd.Output() - if err != nil { - return "", err - } - return strings.TrimSpace(string(output)), nil -} - -// CompareVersions compares two semver strings. 
-// Returns: -// -// -1 if a < b -// 0 if a == b -// 1 if a > b -func CompareVersions(a, b string) int { - aMajor, aMinor, aPatch, _, _, errA := ParseVersion(a) - bMajor, bMinor, bPatch, _, _, errB := ParseVersion(b) - - // Invalid versions are considered less than valid ones - if errA != nil && errB != nil { - return strings.Compare(a, b) - } - if errA != nil { - return -1 - } - if errB != nil { - return 1 - } - - // Compare major - if aMajor != bMajor { - if aMajor < bMajor { - return -1 - } - return 1 - } - - // Compare minor - if aMinor != bMinor { - if aMinor < bMinor { - return -1 - } - return 1 - } - - // Compare patch - if aPatch != bPatch { - if aPatch < bPatch { - return -1 - } - return 1 - } - - return 0 -} diff --git a/pkg/release/version_test.go b/pkg/release/version_test.go deleted file mode 100644 index a829929..0000000 --- a/pkg/release/version_test.go +++ /dev/null @@ -1,520 +0,0 @@ -package release - -import ( - "os" - "os/exec" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// setupGitRepo creates a temporary directory with an initialized git repository. -func setupGitRepo(t *testing.T) string { - t.Helper() - dir := t.TempDir() - - // Initialize git repo - cmd := exec.Command("git", "init") - cmd.Dir = dir - require.NoError(t, cmd.Run()) - - // Configure git user for commits - cmd = exec.Command("git", "config", "user.email", "test@example.com") - cmd.Dir = dir - require.NoError(t, cmd.Run()) - - cmd = exec.Command("git", "config", "user.name", "Test User") - cmd.Dir = dir - require.NoError(t, cmd.Run()) - - return dir -} - -// createCommit creates a commit in the given directory. -func createCommit(t *testing.T, dir, message string) { - t.Helper() - - // Create or modify a file - filePath := filepath.Join(dir, "test.txt") - content, _ := os.ReadFile(filePath) - content = append(content, []byte(message+"\n")...) 
- require.NoError(t, os.WriteFile(filePath, content, 0644)) - - // Stage and commit - cmd := exec.Command("git", "add", ".") - cmd.Dir = dir - require.NoError(t, cmd.Run()) - - cmd = exec.Command("git", "commit", "-m", message) - cmd.Dir = dir - require.NoError(t, cmd.Run()) -} - -// createTag creates a tag in the given directory. -func createTag(t *testing.T, dir, tag string) { - t.Helper() - cmd := exec.Command("git", "tag", tag) - cmd.Dir = dir - require.NoError(t, cmd.Run()) -} - -func TestDetermineVersion_Good(t *testing.T) { - t.Run("returns tag when HEAD has tag", func(t *testing.T) { - dir := setupGitRepo(t) - createCommit(t, dir, "feat: initial commit") - createTag(t, dir, "v1.0.0") - - version, err := DetermineVersion(dir) - require.NoError(t, err) - assert.Equal(t, "v1.0.0", version) - }) - - t.Run("normalizes tag without v prefix", func(t *testing.T) { - dir := setupGitRepo(t) - createCommit(t, dir, "feat: initial commit") - createTag(t, dir, "1.0.0") - - version, err := DetermineVersion(dir) - require.NoError(t, err) - assert.Equal(t, "v1.0.0", version) - }) - - t.Run("increments patch when commits after tag", func(t *testing.T) { - dir := setupGitRepo(t) - createCommit(t, dir, "feat: initial commit") - createTag(t, dir, "v1.0.0") - createCommit(t, dir, "feat: new feature") - - version, err := DetermineVersion(dir) - require.NoError(t, err) - assert.Equal(t, "v1.0.1", version) - }) - - t.Run("returns v0.0.1 when no tags exist", func(t *testing.T) { - dir := setupGitRepo(t) - createCommit(t, dir, "feat: initial commit") - - version, err := DetermineVersion(dir) - require.NoError(t, err) - assert.Equal(t, "v0.0.1", version) - }) - - t.Run("handles multiple tags with increments", func(t *testing.T) { - dir := setupGitRepo(t) - createCommit(t, dir, "feat: first") - createTag(t, dir, "v1.0.0") - createCommit(t, dir, "feat: second") - createTag(t, dir, "v1.0.1") - createCommit(t, dir, "feat: third") - - version, err := DetermineVersion(dir) - 
require.NoError(t, err) - assert.Equal(t, "v1.0.2", version) - }) -} - -func TestDetermineVersion_Bad(t *testing.T) { - t.Run("returns v0.0.1 for empty repo", func(t *testing.T) { - dir := setupGitRepo(t) - - // No commits, git describe will fail - version, err := DetermineVersion(dir) - require.NoError(t, err) - assert.Equal(t, "v0.0.1", version) - }) -} - -func TestGetTagOnHead_Good(t *testing.T) { - t.Run("returns tag when HEAD has tag", func(t *testing.T) { - dir := setupGitRepo(t) - createCommit(t, dir, "feat: initial commit") - createTag(t, dir, "v1.2.3") - - tag, err := getTagOnHead(dir) - require.NoError(t, err) - assert.Equal(t, "v1.2.3", tag) - }) - - t.Run("returns latest tag when multiple tags on HEAD", func(t *testing.T) { - dir := setupGitRepo(t) - createCommit(t, dir, "feat: initial commit") - createTag(t, dir, "v1.0.0") - createTag(t, dir, "v1.0.0-beta") - - tag, err := getTagOnHead(dir) - require.NoError(t, err) - // Git returns one of the tags - assert.Contains(t, []string{"v1.0.0", "v1.0.0-beta"}, tag) - }) -} - -func TestGetTagOnHead_Bad(t *testing.T) { - t.Run("returns error when HEAD has no tag", func(t *testing.T) { - dir := setupGitRepo(t) - createCommit(t, dir, "feat: initial commit") - - _, err := getTagOnHead(dir) - assert.Error(t, err) - }) - - t.Run("returns error when commits after tag", func(t *testing.T) { - dir := setupGitRepo(t) - createCommit(t, dir, "feat: initial commit") - createTag(t, dir, "v1.0.0") - createCommit(t, dir, "feat: new feature") - - _, err := getTagOnHead(dir) - assert.Error(t, err) - }) -} - -func TestGetLatestTag_Good(t *testing.T) { - t.Run("returns latest tag", func(t *testing.T) { - dir := setupGitRepo(t) - createCommit(t, dir, "feat: initial commit") - createTag(t, dir, "v1.0.0") - - tag, err := getLatestTag(dir) - require.NoError(t, err) - assert.Equal(t, "v1.0.0", tag) - }) - - t.Run("returns most recent tag after multiple commits", func(t *testing.T) { - dir := setupGitRepo(t) - createCommit(t, dir, 
"feat: first") - createTag(t, dir, "v1.0.0") - createCommit(t, dir, "feat: second") - createTag(t, dir, "v1.1.0") - createCommit(t, dir, "feat: third") - - tag, err := getLatestTag(dir) - require.NoError(t, err) - assert.Equal(t, "v1.1.0", tag) - }) -} - -func TestGetLatestTag_Bad(t *testing.T) { - t.Run("returns error when no tags exist", func(t *testing.T) { - dir := setupGitRepo(t) - createCommit(t, dir, "feat: initial commit") - - _, err := getLatestTag(dir) - assert.Error(t, err) - }) - - t.Run("returns error for empty repo", func(t *testing.T) { - dir := setupGitRepo(t) - - _, err := getLatestTag(dir) - assert.Error(t, err) - }) -} - -func TestIncrementMinor_Bad(t *testing.T) { - t.Run("returns fallback for invalid version", func(t *testing.T) { - result := IncrementMinor("not-valid") - assert.Equal(t, "not-valid.1", result) - }) -} - -func TestIncrementMajor_Bad(t *testing.T) { - t.Run("returns fallback for invalid version", func(t *testing.T) { - result := IncrementMajor("not-valid") - assert.Equal(t, "not-valid.1", result) - }) -} - -func TestCompareVersions_Ugly(t *testing.T) { - t.Run("handles both invalid versions", func(t *testing.T) { - result := CompareVersions("invalid-a", "invalid-b") - // Should do string comparison for invalid versions - assert.Equal(t, -1, result) // "invalid-a" < "invalid-b" - }) - - t.Run("invalid a returns -1", func(t *testing.T) { - result := CompareVersions("invalid", "v1.0.0") - assert.Equal(t, -1, result) - }) - - t.Run("invalid b returns 1", func(t *testing.T) { - result := CompareVersions("v1.0.0", "invalid") - assert.Equal(t, 1, result) - }) -} - -func TestIncrementVersion_Good(t *testing.T) { - tests := []struct { - name string - input string - expected string - }{ - { - name: "increment patch with v prefix", - input: "v1.2.3", - expected: "v1.2.4", - }, - { - name: "increment patch without v prefix", - input: "1.2.3", - expected: "v1.2.4", - }, - { - name: "increment from zero", - input: "v0.0.0", - expected: 
"v0.0.1", - }, - { - name: "strips prerelease", - input: "v1.2.3-alpha", - expected: "v1.2.4", - }, - { - name: "strips build metadata", - input: "v1.2.3+build123", - expected: "v1.2.4", - }, - { - name: "strips prerelease and build", - input: "v1.2.3-beta.1+build456", - expected: "v1.2.4", - }, - { - name: "handles large numbers", - input: "v10.20.99", - expected: "v10.20.100", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - result := IncrementVersion(tc.input) - assert.Equal(t, tc.expected, result) - }) - } -} - -func TestIncrementVersion_Bad(t *testing.T) { - t.Run("invalid semver returns original with suffix", func(t *testing.T) { - result := IncrementVersion("not-a-version") - assert.Equal(t, "not-a-version.1", result) - }) -} - -func TestIncrementMinor_Good(t *testing.T) { - tests := []struct { - name string - input string - expected string - }{ - { - name: "increment minor resets patch", - input: "v1.2.3", - expected: "v1.3.0", - }, - { - name: "increment minor from zero", - input: "v1.0.5", - expected: "v1.1.0", - }, - { - name: "handles large numbers", - input: "v5.99.50", - expected: "v5.100.0", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - result := IncrementMinor(tc.input) - assert.Equal(t, tc.expected, result) - }) - } -} - -func TestIncrementMajor_Good(t *testing.T) { - tests := []struct { - name string - input string - expected string - }{ - { - name: "increment major resets minor and patch", - input: "v1.2.3", - expected: "v2.0.0", - }, - { - name: "increment major from zero", - input: "v0.5.10", - expected: "v1.0.0", - }, - { - name: "handles large numbers", - input: "v99.50.25", - expected: "v100.0.0", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - result := IncrementMajor(tc.input) - assert.Equal(t, tc.expected, result) - }) - } -} - -func TestParseVersion_Good(t *testing.T) { - tests := []struct { - name string - input string - major int - minor 
int - patch int - prerelease string - build string - }{ - { - name: "simple version with v", - input: "v1.2.3", - major: 1, minor: 2, patch: 3, - }, - { - name: "simple version without v", - input: "1.2.3", - major: 1, minor: 2, patch: 3, - }, - { - name: "with prerelease", - input: "v1.2.3-alpha", - major: 1, minor: 2, patch: 3, - prerelease: "alpha", - }, - { - name: "with prerelease and build", - input: "v1.2.3-beta.1+build.456", - major: 1, minor: 2, patch: 3, - prerelease: "beta.1", - build: "build.456", - }, - { - name: "with build only", - input: "v1.2.3+sha.abc123", - major: 1, minor: 2, patch: 3, - build: "sha.abc123", - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - major, minor, patch, prerelease, build, err := ParseVersion(tc.input) - assert.NoError(t, err) - assert.Equal(t, tc.major, major) - assert.Equal(t, tc.minor, minor) - assert.Equal(t, tc.patch, patch) - assert.Equal(t, tc.prerelease, prerelease) - assert.Equal(t, tc.build, build) - }) - } -} - -func TestParseVersion_Bad(t *testing.T) { - tests := []struct { - name string - input string - }{ - {"empty string", ""}, - {"not a version", "not-a-version"}, - {"missing minor", "v1"}, - {"missing patch", "v1.2"}, - {"letters in version", "v1.2.x"}, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - _, _, _, _, _, err := ParseVersion(tc.input) - assert.Error(t, err) - }) - } -} - -func TestValidateVersion_Good(t *testing.T) { - validVersions := []string{ - "v1.0.0", - "1.0.0", - "v0.0.1", - "v10.20.30", - "v1.2.3-alpha", - "v1.2.3+build", - "v1.2.3-alpha.1+build.123", - } - - for _, v := range validVersions { - t.Run(v, func(t *testing.T) { - assert.True(t, ValidateVersion(v)) - }) - } -} - -func TestValidateVersion_Bad(t *testing.T) { - invalidVersions := []string{ - "", - "v1", - "v1.2", - "1.2", - "not-a-version", - "v1.2.x", - "version1.0.0", - } - - for _, v := range invalidVersions { - t.Run(v, func(t *testing.T) { - assert.False(t, 
ValidateVersion(v)) - }) - } -} - -func TestCompareVersions_Good(t *testing.T) { - tests := []struct { - name string - a string - b string - expected int - }{ - {"equal versions", "v1.0.0", "v1.0.0", 0}, - {"a less than b major", "v1.0.0", "v2.0.0", -1}, - {"a greater than b major", "v2.0.0", "v1.0.0", 1}, - {"a less than b minor", "v1.1.0", "v1.2.0", -1}, - {"a greater than b minor", "v1.2.0", "v1.1.0", 1}, - {"a less than b patch", "v1.0.1", "v1.0.2", -1}, - {"a greater than b patch", "v1.0.2", "v1.0.1", 1}, - {"with and without v prefix", "v1.0.0", "1.0.0", 0}, - {"different scales", "v1.10.0", "v1.9.0", 1}, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - result := CompareVersions(tc.a, tc.b) - assert.Equal(t, tc.expected, result) - }) - } -} - -func TestNormalizeVersion_Good(t *testing.T) { - tests := []struct { - input string - expected string - }{ - {"1.0.0", "v1.0.0"}, - {"v1.0.0", "v1.0.0"}, - {"0.0.1", "v0.0.1"}, - {"v10.20.30", "v10.20.30"}, - } - - for _, tc := range tests { - t.Run(tc.input, func(t *testing.T) { - result := normalizeVersion(tc.input) - assert.Equal(t, tc.expected, result) - }) - } -} diff --git a/pkg/repos/registry.go b/pkg/repos/registry.go deleted file mode 100644 index 3ae5d8c..0000000 --- a/pkg/repos/registry.go +++ /dev/null @@ -1,311 +0,0 @@ -// Package repos provides functionality for managing multi-repo workspaces. -// It reads a repos.yaml registry file that defines repositories, their types, -// dependencies, and metadata. -package repos - -import ( - "fmt" - "os" - "path/filepath" - "strings" - - "gopkg.in/yaml.v3" -) - -// Registry represents a collection of repositories defined in repos.yaml. -type Registry struct { - Version int `yaml:"version"` - Org string `yaml:"org"` - BasePath string `yaml:"base_path"` - Repos map[string]*Repo `yaml:"repos"` - Defaults RegistryDefaults `yaml:"defaults"` -} - -// RegistryDefaults contains default values applied to all repos. 
-type RegistryDefaults struct { - CI string `yaml:"ci"` - License string `yaml:"license"` - Branch string `yaml:"branch"` -} - -// RepoType indicates the role of a repository in the ecosystem. -type RepoType string - -const ( - RepoTypeFoundation RepoType = "foundation" - RepoTypeModule RepoType = "module" - RepoTypeProduct RepoType = "product" - RepoTypeTemplate RepoType = "template" -) - -// Repo represents a single repository in the registry. -type Repo struct { - Name string `yaml:"-"` // Set from map key - Type string `yaml:"type"` - DependsOn []string `yaml:"depends_on"` - Description string `yaml:"description"` - Docs bool `yaml:"docs"` - CI string `yaml:"ci"` - Domain string `yaml:"domain,omitempty"` - Clone *bool `yaml:"clone,omitempty"` // nil = true, false = skip cloning - - // Computed fields - Path string `yaml:"-"` // Full path to repo directory -} - -// LoadRegistry reads and parses a repos.yaml file. -func LoadRegistry(path string) (*Registry, error) { - data, err := os.ReadFile(path) - if err != nil { - return nil, fmt.Errorf("failed to read registry file: %w", err) - } - - var reg Registry - if err := yaml.Unmarshal(data, ®); err != nil { - return nil, fmt.Errorf("failed to parse registry file: %w", err) - } - - // Expand base path - reg.BasePath = expandPath(reg.BasePath) - - // Set computed fields on each repo - for name, repo := range reg.Repos { - repo.Name = name - repo.Path = filepath.Join(reg.BasePath, name) - - // Apply defaults if not set - if repo.CI == "" { - repo.CI = reg.Defaults.CI - } - } - - return ®, nil -} - -// FindRegistry searches for repos.yaml in common locations. -// It checks: current directory, parent directories, and home directory. 
-func FindRegistry() (string, error) { - // Check current directory and parents - dir, err := os.Getwd() - if err != nil { - return "", err - } - - for { - candidate := filepath.Join(dir, "repos.yaml") - if _, err := os.Stat(candidate); err == nil { - return candidate, nil - } - - parent := filepath.Dir(dir) - if parent == dir { - break - } - dir = parent - } - - // Check home directory common locations - home, err := os.UserHomeDir() - if err != nil { - return "", err - } - - commonPaths := []string{ - filepath.Join(home, "Code", "host-uk", "repos.yaml"), - filepath.Join(home, ".config", "core", "repos.yaml"), - } - - for _, p := range commonPaths { - if _, err := os.Stat(p); err == nil { - return p, nil - } - } - - return "", fmt.Errorf("repos.yaml not found") -} - -// ScanDirectory creates a Registry by scanning a directory for git repos. -// This is used as a fallback when no repos.yaml is found. -func ScanDirectory(dir string) (*Registry, error) { - entries, err := os.ReadDir(dir) - if err != nil { - return nil, fmt.Errorf("failed to read directory: %w", err) - } - - reg := &Registry{ - Version: 1, - BasePath: dir, - Repos: make(map[string]*Repo), - } - - // Try to detect org from git remote - for _, entry := range entries { - if !entry.IsDir() { - continue - } - - repoPath := filepath.Join(dir, entry.Name()) - gitPath := filepath.Join(repoPath, ".git") - - if _, err := os.Stat(gitPath); err != nil { - continue // Not a git repo - } - - repo := &Repo{ - Name: entry.Name(), - Path: repoPath, - Type: "module", // Default type - } - - reg.Repos[entry.Name()] = repo - - // Try to detect org from first repo's remote - if reg.Org == "" { - reg.Org = detectOrg(repoPath) - } - } - - return reg, nil -} - -// detectOrg tries to extract the GitHub org from a repo's origin remote. 
-func detectOrg(repoPath string) string { - // Try to read git remote - cmd := filepath.Join(repoPath, ".git", "config") - data, err := os.ReadFile(cmd) - if err != nil { - return "" - } - - // Simple parse for github.com URLs - content := string(data) - // Look for patterns like github.com:org/repo or github.com/org/repo - for _, line := range strings.Split(content, "\n") { - line = strings.TrimSpace(line) - if !strings.HasPrefix(line, "url = ") { - continue - } - url := strings.TrimPrefix(line, "url = ") - - // git@github.com:org/repo.git - if strings.Contains(url, "github.com:") { - parts := strings.Split(url, ":") - if len(parts) >= 2 { - orgRepo := strings.TrimSuffix(parts[1], ".git") - orgParts := strings.Split(orgRepo, "/") - if len(orgParts) >= 1 { - return orgParts[0] - } - } - } - - // https://github.com/org/repo.git - if strings.Contains(url, "github.com/") { - parts := strings.Split(url, "github.com/") - if len(parts) >= 2 { - orgRepo := strings.TrimSuffix(parts[1], ".git") - orgParts := strings.Split(orgRepo, "/") - if len(orgParts) >= 1 { - return orgParts[0] - } - } - } - } - - return "" -} - -// List returns all repos in the registry. -func (r *Registry) List() []*Repo { - repos := make([]*Repo, 0, len(r.Repos)) - for _, repo := range r.Repos { - repos = repos - repos = append(repos, repo) - } - return repos -} - -// Get returns a repo by name. -func (r *Registry) Get(name string) (*Repo, bool) { - repo, ok := r.Repos[name] - return repo, ok -} - -// ByType returns repos filtered by type. -func (r *Registry) ByType(t string) []*Repo { - var repos []*Repo - for _, repo := range r.Repos { - if repo.Type == t { - repos = append(repos, repo) - } - } - return repos -} - -// TopologicalOrder returns repos sorted by dependency order. -// Foundation repos come first, then modules, then products. 
-func (r *Registry) TopologicalOrder() ([]*Repo, error) { - // Build dependency graph - visited := make(map[string]bool) - visiting := make(map[string]bool) - var result []*Repo - - var visit func(name string) error - visit = func(name string) error { - if visited[name] { - return nil - } - if visiting[name] { - return fmt.Errorf("circular dependency detected: %s", name) - } - - repo, ok := r.Repos[name] - if !ok { - return fmt.Errorf("unknown repo: %s", name) - } - - visiting[name] = true - for _, dep := range repo.DependsOn { - if err := visit(dep); err != nil { - return err - } - } - visiting[name] = false - visited[name] = true - result = append(result, repo) - return nil - } - - for name := range r.Repos { - if err := visit(name); err != nil { - return nil, err - } - } - - return result, nil -} - -// Exists checks if the repo directory exists on disk. -func (repo *Repo) Exists() bool { - info, err := os.Stat(repo.Path) - return err == nil && info.IsDir() -} - -// IsGitRepo checks if the repo directory contains a .git folder. -func (repo *Repo) IsGitRepo() bool { - gitPath := filepath.Join(repo.Path, ".git") - info, err := os.Stat(gitPath) - return err == nil && info.IsDir() -} - -// expandPath expands ~ to home directory. -func expandPath(path string) string { - if strings.HasPrefix(path, "~/") { - home, err := os.UserHomeDir() - if err != nil { - return path - } - return filepath.Join(home, path[2:]) - } - return path -} \ No newline at end of file diff --git a/pkg/sdk/cmd_commands.go b/pkg/sdk/cmd_commands.go deleted file mode 100644 index d0b5ecc..0000000 --- a/pkg/sdk/cmd_commands.go +++ /dev/null @@ -1,8 +0,0 @@ -// SDK validation and API compatibility commands. -// -// Commands: -// - diff: Check for breaking API changes between spec versions -// - validate: Validate OpenAPI spec syntax -// -// Configuration via .core/sdk.yaml. 
For SDK generation, use: core build sdk -package sdk diff --git a/pkg/sdk/cmd_sdk.go b/pkg/sdk/cmd_sdk.go deleted file mode 100644 index 1854ef1..0000000 --- a/pkg/sdk/cmd_sdk.go +++ /dev/null @@ -1,134 +0,0 @@ -package sdk - -import ( - "errors" - "fmt" - "os" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/spf13/cobra" -) - -func init() { - cli.RegisterCommands(AddSDKCommands) -} - -// SDK styles (aliases to shared) -var ( - sdkHeaderStyle = cli.TitleStyle - sdkSuccessStyle = cli.SuccessStyle - sdkErrorStyle = cli.ErrorStyle - sdkDimStyle = cli.DimStyle -) - -var sdkCmd = &cobra.Command{ - Use: "sdk", - Short: i18n.T("cmd.sdk.short"), - Long: i18n.T("cmd.sdk.long"), -} - -var diffBasePath string -var diffSpecPath string - -var sdkDiffCmd = &cobra.Command{ - Use: "diff", - Short: i18n.T("cmd.sdk.diff.short"), - Long: i18n.T("cmd.sdk.diff.long"), - RunE: func(cmd *cobra.Command, args []string) error { - return runSDKDiff(diffBasePath, diffSpecPath) - }, -} - -var validateSpecPath string - -var sdkValidateCmd = &cobra.Command{ - Use: "validate", - Short: i18n.T("cmd.sdk.validate.short"), - Long: i18n.T("cmd.sdk.validate.long"), - RunE: func(cmd *cobra.Command, args []string) error { - return runSDKValidate(validateSpecPath) - }, -} - -func initSDKCommands() { - // sdk diff flags - sdkDiffCmd.Flags().StringVar(&diffBasePath, "base", "", i18n.T("cmd.sdk.diff.flag.base")) - sdkDiffCmd.Flags().StringVar(&diffSpecPath, "spec", "", i18n.T("cmd.sdk.diff.flag.spec")) - - // sdk validate flags - sdkValidateCmd.Flags().StringVar(&validateSpecPath, "spec", "", i18n.T("common.flag.spec")) - - // Add subcommands - sdkCmd.AddCommand(sdkDiffCmd) - sdkCmd.AddCommand(sdkValidateCmd) -} - -// AddSDKCommands registers the 'sdk' command and all subcommands. 
-func AddSDKCommands(root *cobra.Command) { - initSDKCommands() - root.AddCommand(sdkCmd) -} - -func runSDKDiff(basePath, specPath string) error { - projectDir, err := os.Getwd() - if err != nil { - return fmt.Errorf("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - // Detect current spec if not provided - if specPath == "" { - s := New(projectDir, nil) - specPath, err = s.DetectSpec() - if err != nil { - return err - } - } - - if basePath == "" { - return errors.New(i18n.T("cmd.sdk.diff.error.base_required")) - } - - fmt.Printf("%s %s\n", sdkHeaderStyle.Render(i18n.T("cmd.sdk.diff.label")), i18n.ProgressSubject("check", "breaking changes")) - fmt.Printf(" %s %s\n", i18n.T("cmd.sdk.diff.base_label"), sdkDimStyle.Render(basePath)) - fmt.Printf(" %s %s\n", i18n.Label("current"), sdkDimStyle.Render(specPath)) - fmt.Println() - - result, err := Diff(basePath, specPath) - if err != nil { - fmt.Printf("%s %v\n", sdkErrorStyle.Render(i18n.Label("error")), err) - os.Exit(2) - } - - if result.Breaking { - fmt.Printf("%s %s\n", sdkErrorStyle.Render(i18n.T("cmd.sdk.diff.breaking")), result.Summary) - for _, change := range result.Changes { - fmt.Printf(" - %s\n", change) - } - os.Exit(1) - } - - fmt.Printf("%s %s\n", sdkSuccessStyle.Render(i18n.T("cmd.sdk.label.ok")), result.Summary) - return nil -} - -func runSDKValidate(specPath string) error { - projectDir, err := os.Getwd() - if err != nil { - return fmt.Errorf("%s: %w", i18n.T("i18n.fail.get", "working directory"), err) - } - - s := New(projectDir, &Config{Spec: specPath}) - - fmt.Printf("%s %s\n", sdkHeaderStyle.Render(i18n.T("cmd.sdk.label.sdk")), i18n.T("cmd.sdk.validate.validating")) - - detectedPath, err := s.DetectSpec() - if err != nil { - fmt.Printf("%s %v\n", sdkErrorStyle.Render(i18n.Label("error")), err) - return err - } - - fmt.Printf(" %s %s\n", i18n.Label("spec"), sdkDimStyle.Render(detectedPath)) - fmt.Printf("%s %s\n", sdkSuccessStyle.Render(i18n.T("cmd.sdk.label.ok")), 
i18n.T("cmd.sdk.validate.valid")) - return nil -} diff --git a/pkg/sdk/detect.go b/pkg/sdk/detect.go deleted file mode 100644 index aeb221f..0000000 --- a/pkg/sdk/detect.go +++ /dev/null @@ -1,78 +0,0 @@ -package sdk - -import ( - "fmt" - "os" - "path/filepath" - "strings" -) - -// commonSpecPaths are checked in order when no spec is configured. -var commonSpecPaths = []string{ - "api/openapi.yaml", - "api/openapi.json", - "openapi.yaml", - "openapi.json", - "docs/api.yaml", - "docs/api.json", - "swagger.yaml", - "swagger.json", -} - -// DetectSpec finds the OpenAPI spec file. -// Priority: config path -> common paths -> Laravel Scramble. -func (s *SDK) DetectSpec() (string, error) { - // 1. Check configured path - if s.config.Spec != "" { - specPath := filepath.Join(s.projectDir, s.config.Spec) - if _, err := os.Stat(specPath); err == nil { - return specPath, nil - } - return "", fmt.Errorf("sdk.DetectSpec: configured spec not found: %s", s.config.Spec) - } - - // 2. Check common paths - for _, p := range commonSpecPaths { - specPath := filepath.Join(s.projectDir, p) - if _, err := os.Stat(specPath); err == nil { - return specPath, nil - } - } - - // 3. Try Laravel Scramble detection - specPath, err := s.detectScramble() - if err == nil { - return specPath, nil - } - - return "", fmt.Errorf("sdk.DetectSpec: no OpenAPI spec found (checked config, common paths, Scramble)") -} - -// detectScramble checks for Laravel Scramble and exports the spec. 
-func (s *SDK) detectScramble() (string, error) { - composerPath := filepath.Join(s.projectDir, "composer.json") - if _, err := os.Stat(composerPath); err != nil { - return "", fmt.Errorf("no composer.json") - } - - // Check for scramble in composer.json - data, err := os.ReadFile(composerPath) - if err != nil { - return "", err - } - - // Simple check for scramble package - if !containsScramble(data) { - return "", fmt.Errorf("scramble not found in composer.json") - } - - // TODO: Run php artisan scramble:export - return "", fmt.Errorf("scramble export not implemented") -} - -// containsScramble checks if composer.json includes scramble. -func containsScramble(data []byte) bool { - content := string(data) - return strings.Contains(content, "dedoc/scramble") || - strings.Contains(content, "\"scramble\"") -} diff --git a/pkg/sdk/detect_test.go b/pkg/sdk/detect_test.go deleted file mode 100644 index 15aa89f..0000000 --- a/pkg/sdk/detect_test.go +++ /dev/null @@ -1,87 +0,0 @@ -package sdk - -import ( - "os" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestDetectSpec_Good_ConfigPath(t *testing.T) { - tmpDir := t.TempDir() - specPath := filepath.Join(tmpDir, "api", "spec.yaml") - err := os.MkdirAll(filepath.Dir(specPath), 0755) - require.NoError(t, err) - err = os.WriteFile(specPath, []byte("openapi: 3.0.0"), 0644) - require.NoError(t, err) - - sdk := New(tmpDir, &Config{Spec: "api/spec.yaml"}) - got, err := sdk.DetectSpec() - assert.NoError(t, err) - assert.Equal(t, specPath, got) -} - -func TestDetectSpec_Good_CommonPath(t *testing.T) { - tmpDir := t.TempDir() - specPath := filepath.Join(tmpDir, "openapi.yaml") - err := os.WriteFile(specPath, []byte("openapi: 3.0.0"), 0644) - require.NoError(t, err) - - sdk := New(tmpDir, nil) - got, err := sdk.DetectSpec() - assert.NoError(t, err) - assert.Equal(t, specPath, got) -} - -func TestDetectSpec_Bad_NotFound(t *testing.T) { - tmpDir := t.TempDir() 
- sdk := New(tmpDir, nil) - _, err := sdk.DetectSpec() - assert.Error(t, err) - assert.Contains(t, err.Error(), "no OpenAPI spec found") -} - -func TestDetectSpec_Bad_ConfigNotFound(t *testing.T) { - tmpDir := t.TempDir() - sdk := New(tmpDir, &Config{Spec: "non-existent.yaml"}) - _, err := sdk.DetectSpec() - assert.Error(t, err) - assert.Contains(t, err.Error(), "configured spec not found") -} - -func TestContainsScramble(t *testing.T) { - tests := []struct { - data string - expected bool - }{ - {`{"require": {"dedoc/scramble": "^0.1"}}`, true}, - {`{"require": {"scramble": "^0.1"}}`, true}, - {`{"require": {"laravel/framework": "^11.0"}}`, false}, - } - - for _, tt := range tests { - assert.Equal(t, tt.expected, containsScramble([]byte(tt.data))) - } -} - -func TestDetectScramble_Bad(t *testing.T) { - t.Run("no composer.json", func(t *testing.T) { - sdk := New(t.TempDir(), nil) - _, err := sdk.detectScramble() - assert.Error(t, err) - assert.Contains(t, err.Error(), "no composer.json") - }) - - t.Run("no scramble in composer.json", func(t *testing.T) { - tmpDir := t.TempDir() - err := os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{}`), 0644) - require.NoError(t, err) - - sdk := New(tmpDir, nil) - _, err = sdk.detectScramble() - assert.Error(t, err) - assert.Contains(t, err.Error(), "scramble not found") - }) -} \ No newline at end of file diff --git a/pkg/sdk/diff.go b/pkg/sdk/diff.go deleted file mode 100644 index ebd4f6c..0000000 --- a/pkg/sdk/diff.go +++ /dev/null @@ -1,83 +0,0 @@ -package sdk - -import ( - "fmt" - - "github.com/getkin/kin-openapi/openapi3" - "github.com/oasdiff/oasdiff/checker" - "github.com/oasdiff/oasdiff/diff" - "github.com/oasdiff/oasdiff/load" -) - -// DiffResult holds the result of comparing two OpenAPI specs. -type DiffResult struct { - // Breaking is true if breaking changes were detected. - Breaking bool - // Changes is the list of breaking changes. - Changes []string - // Summary is a human-readable summary. 
- Summary string -} - -// Diff compares two OpenAPI specs and detects breaking changes. -func Diff(basePath, revisionPath string) (*DiffResult, error) { - loader := openapi3.NewLoader() - loader.IsExternalRefsAllowed = true - - // Load specs - baseSpec, err := load.NewSpecInfo(loader, load.NewSource(basePath)) - if err != nil { - return nil, fmt.Errorf("sdk.Diff: failed to load base spec: %w", err) - } - - revSpec, err := load.NewSpecInfo(loader, load.NewSource(revisionPath)) - if err != nil { - return nil, fmt.Errorf("sdk.Diff: failed to load revision spec: %w", err) - } - - // Compute diff with operations sources map for better error reporting - diffResult, operationsSources, err := diff.GetWithOperationsSourcesMap(diff.NewConfig(), baseSpec, revSpec) - if err != nil { - return nil, fmt.Errorf("sdk.Diff: failed to compute diff: %w", err) - } - - // Check for breaking changes - config := checker.NewConfig(checker.GetAllChecks()) - breaks := checker.CheckBackwardCompatibilityUntilLevel( - config, - diffResult, - operationsSources, - checker.ERR, // Only errors (breaking changes) - ) - - // Build result - result := &DiffResult{ - Breaking: len(breaks) > 0, - Changes: make([]string, 0, len(breaks)), - } - - localizer := checker.NewDefaultLocalizer() - for _, b := range breaks { - result.Changes = append(result.Changes, b.GetUncolorizedText(localizer)) - } - - if result.Breaking { - result.Summary = fmt.Sprintf("%d breaking change(s) detected", len(breaks)) - } else { - result.Summary = "No breaking changes" - } - - return result, nil -} - -// DiffExitCode returns the exit code for CI integration. 
-// 0 = no breaking changes, 1 = breaking changes, 2 = error -func DiffExitCode(result *DiffResult, err error) int { - if err != nil { - return 2 - } - if result.Breaking { - return 1 - } - return 0 -} diff --git a/pkg/sdk/diff_test.go b/pkg/sdk/diff_test.go deleted file mode 100644 index 812ab84..0000000 --- a/pkg/sdk/diff_test.go +++ /dev/null @@ -1,101 +0,0 @@ -package sdk - -import ( - "os" - "path/filepath" - "testing" -) - -func TestDiff_Good_NoBreaking(t *testing.T) { - tmpDir := t.TempDir() - - baseSpec := `openapi: "3.0.0" -info: - title: Test API - version: "1.0.0" -paths: - /health: - get: - operationId: getHealth - responses: - "200": - description: OK -` - revSpec := `openapi: "3.0.0" -info: - title: Test API - version: "1.1.0" -paths: - /health: - get: - operationId: getHealth - responses: - "200": - description: OK - /status: - get: - operationId: getStatus - responses: - "200": - description: OK -` - basePath := filepath.Join(tmpDir, "base.yaml") - revPath := filepath.Join(tmpDir, "rev.yaml") - os.WriteFile(basePath, []byte(baseSpec), 0644) - os.WriteFile(revPath, []byte(revSpec), 0644) - - result, err := Diff(basePath, revPath) - if err != nil { - t.Fatalf("Diff failed: %v", err) - } - if result.Breaking { - t.Error("expected no breaking changes for adding endpoint") - } -} - -func TestDiff_Good_Breaking(t *testing.T) { - tmpDir := t.TempDir() - - baseSpec := `openapi: "3.0.0" -info: - title: Test API - version: "1.0.0" -paths: - /health: - get: - operationId: getHealth - responses: - "200": - description: OK - /users: - get: - operationId: getUsers - responses: - "200": - description: OK -` - revSpec := `openapi: "3.0.0" -info: - title: Test API - version: "2.0.0" -paths: - /health: - get: - operationId: getHealth - responses: - "200": - description: OK -` - basePath := filepath.Join(tmpDir, "base.yaml") - revPath := filepath.Join(tmpDir, "rev.yaml") - os.WriteFile(basePath, []byte(baseSpec), 0644) - os.WriteFile(revPath, []byte(revSpec), 0644) - 
- result, err := Diff(basePath, revPath) - if err != nil { - t.Fatalf("Diff failed: %v", err) - } - if !result.Breaking { - t.Error("expected breaking change for removed endpoint") - } -} diff --git a/pkg/sdk/generators/generator.go b/pkg/sdk/generators/generator.go deleted file mode 100644 index 3a37f2e..0000000 --- a/pkg/sdk/generators/generator.go +++ /dev/null @@ -1,79 +0,0 @@ -// Package generators provides SDK code generators for different languages. -package generators - -import ( - "context" - "fmt" - "os" - "runtime" -) - -// Options holds common generation options. -type Options struct { - // SpecPath is the path to the OpenAPI spec file. - SpecPath string - // OutputDir is where to write the generated SDK. - OutputDir string - // PackageName is the package/module name. - PackageName string - // Version is the SDK version. - Version string -} - -// Generator defines the interface for SDK generators. -type Generator interface { - // Language returns the generator's target language identifier. - Language() string - - // Generate creates SDK from OpenAPI spec. - Generate(ctx context.Context, opts Options) error - - // Available checks if generator dependencies are installed. - Available() bool - - // Install returns instructions for installing the generator. - Install() string -} - -// Registry holds available generators. -type Registry struct { - generators map[string]Generator -} - -// NewRegistry creates a registry with all available generators. -func NewRegistry() *Registry { - r := &Registry{ - generators: make(map[string]Generator), - } - // Generators will be registered in subsequent tasks - return r -} - -// Get returns a generator by language. -func (r *Registry) Get(lang string) (Generator, bool) { - g, ok := r.generators[lang] - return g, ok -} - -// Register adds a generator to the registry. -func (r *Registry) Register(g Generator) { - r.generators[g.Language()] = g -} - -// Languages returns all registered language identifiers. 
-func (r *Registry) Languages() []string { - langs := make([]string, 0, len(r.generators)) - for lang := range r.generators { - langs = append(langs, lang) - } - return langs -} - -// dockerUserArgs returns Docker --user args for the current user on Unix systems. -// On Windows, Docker handles permissions differently, so no args are returned. -func dockerUserArgs() []string { - if runtime.GOOS == "windows" { - return nil - } - return []string{"--user", fmt.Sprintf("%d:%d", os.Getuid(), os.Getgid())} -} diff --git a/pkg/sdk/generators/go.go b/pkg/sdk/generators/go.go deleted file mode 100644 index e2c2bc1..0000000 --- a/pkg/sdk/generators/go.go +++ /dev/null @@ -1,87 +0,0 @@ -package generators - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" -) - -// GoGenerator generates Go SDKs from OpenAPI specs. -type GoGenerator struct{} - -// NewGoGenerator creates a new Go generator. -func NewGoGenerator() *GoGenerator { - return &GoGenerator{} -} - -// Language returns the generator's target language identifier. -func (g *GoGenerator) Language() string { - return "go" -} - -// Available checks if generator dependencies are installed. -func (g *GoGenerator) Available() bool { - _, err := exec.LookPath("oapi-codegen") - return err == nil -} - -// Install returns instructions for installing the generator. -func (g *GoGenerator) Install() string { - return "go install github.com/oapi-codegen/oapi-codegen/v2/cmd/oapi-codegen@latest" -} - -// Generate creates SDK from OpenAPI spec. 
-func (g *GoGenerator) Generate(ctx context.Context, opts Options) error { - if err := os.MkdirAll(opts.OutputDir, 0755); err != nil { - return fmt.Errorf("go.Generate: failed to create output dir: %w", err) - } - - if g.Available() { - return g.generateNative(ctx, opts) - } - return g.generateDocker(ctx, opts) -} - -func (g *GoGenerator) generateNative(ctx context.Context, opts Options) error { - outputFile := filepath.Join(opts.OutputDir, "client.go") - - cmd := exec.CommandContext(ctx, "oapi-codegen", - "-package", opts.PackageName, - "-generate", "types,client", - "-o", outputFile, - opts.SpecPath, - ) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - if err := cmd.Run(); err != nil { - return fmt.Errorf("go.generateNative: %w", err) - } - - goMod := fmt.Sprintf("module %s\n\ngo 1.21\n", opts.PackageName) - return os.WriteFile(filepath.Join(opts.OutputDir, "go.mod"), []byte(goMod), 0644) -} - -func (g *GoGenerator) generateDocker(ctx context.Context, opts Options) error { - specDir := filepath.Dir(opts.SpecPath) - specName := filepath.Base(opts.SpecPath) - - args := []string{"run", "--rm"} - args = append(args, dockerUserArgs()...) - args = append(args, - "-v", specDir+":/spec", - "-v", opts.OutputDir+":/out", - "openapitools/openapi-generator-cli", "generate", - "-i", "/spec/"+specName, - "-g", "go", - "-o", "/out", - "--additional-properties=packageName="+opts.PackageName, - ) - - cmd := exec.CommandContext(ctx, "docker", args...) 
- cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - return cmd.Run() -} diff --git a/pkg/sdk/generators/go_test.go b/pkg/sdk/generators/go_test.go deleted file mode 100644 index 708b7dd..0000000 --- a/pkg/sdk/generators/go_test.go +++ /dev/null @@ -1,58 +0,0 @@ -package generators - -import ( - "context" - "os" - "path/filepath" - "testing" - "time" -) - -func TestGoGenerator_Good_Available(t *testing.T) { - g := NewGoGenerator() - - // These should not panic - lang := g.Language() - if lang != "go" { - t.Errorf("expected language 'go', got '%s'", lang) - } - - _ = g.Available() - - install := g.Install() - if install == "" { - t.Error("expected non-empty install instructions") - } -} - -func TestGoGenerator_Good_Generate(t *testing.T) { - g := NewGoGenerator() - if !g.Available() && !dockerAvailable() { - t.Skip("no Go generator available (neither native nor docker)") - } - - // Create temp directories - tmpDir := t.TempDir() - specPath := createTestSpec(t, tmpDir) - outputDir := filepath.Join(tmpDir, "output") - - opts := Options{ - SpecPath: specPath, - OutputDir: outputDir, - PackageName: "testclient", - Version: "1.0.0", - } - - ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) - defer cancel() - - err := g.Generate(ctx, opts) - if err != nil { - t.Fatalf("Generate failed: %v", err) - } - - // Verify output directory was created - if _, err := os.Stat(outputDir); os.IsNotExist(err) { - t.Error("output directory was not created") - } -} diff --git a/pkg/sdk/generators/php.go b/pkg/sdk/generators/php.go deleted file mode 100644 index 6403af3..0000000 --- a/pkg/sdk/generators/php.go +++ /dev/null @@ -1,68 +0,0 @@ -package generators - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" -) - -// PHPGenerator generates PHP SDKs from OpenAPI specs. -type PHPGenerator struct{} - -// NewPHPGenerator creates a new PHP generator. 
-func NewPHPGenerator() *PHPGenerator { - return &PHPGenerator{} -} - -// Language returns the generator's target language identifier. -func (g *PHPGenerator) Language() string { - return "php" -} - -// Available checks if generator dependencies are installed. -func (g *PHPGenerator) Available() bool { - _, err := exec.LookPath("docker") - return err == nil -} - -// Install returns instructions for installing the generator. -func (g *PHPGenerator) Install() string { - return "Docker is required for PHP SDK generation" -} - -// Generate creates SDK from OpenAPI spec. -func (g *PHPGenerator) Generate(ctx context.Context, opts Options) error { - if !g.Available() { - return fmt.Errorf("php.Generate: Docker is required but not available") - } - - if err := os.MkdirAll(opts.OutputDir, 0755); err != nil { - return fmt.Errorf("php.Generate: failed to create output dir: %w", err) - } - - specDir := filepath.Dir(opts.SpecPath) - specName := filepath.Base(opts.SpecPath) - - args := []string{"run", "--rm"} - args = append(args, dockerUserArgs()...) - args = append(args, - "-v", specDir+":/spec", - "-v", opts.OutputDir+":/out", - "openapitools/openapi-generator-cli", "generate", - "-i", "/spec/"+specName, - "-g", "php", - "-o", "/out", - "--additional-properties=invokerPackage="+opts.PackageName, - ) - - cmd := exec.CommandContext(ctx, "docker", args...) 
- cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - if err := cmd.Run(); err != nil { - return fmt.Errorf("php.Generate: %w", err) - } - return nil -} diff --git a/pkg/sdk/generators/php_test.go b/pkg/sdk/generators/php_test.go deleted file mode 100644 index a3a6e4a..0000000 --- a/pkg/sdk/generators/php_test.go +++ /dev/null @@ -1,58 +0,0 @@ -package generators - -import ( - "context" - "os" - "path/filepath" - "testing" - "time" -) - -func TestPHPGenerator_Good_Available(t *testing.T) { - g := NewPHPGenerator() - - // These should not panic - lang := g.Language() - if lang != "php" { - t.Errorf("expected language 'php', got '%s'", lang) - } - - _ = g.Available() - - install := g.Install() - if install == "" { - t.Error("expected non-empty install instructions") - } -} - -func TestPHPGenerator_Good_Generate(t *testing.T) { - g := NewPHPGenerator() - if !g.Available() { - t.Skip("no PHP generator available (docker not installed)") - } - - // Create temp directories - tmpDir := t.TempDir() - specPath := createTestSpec(t, tmpDir) - outputDir := filepath.Join(tmpDir, "output") - - opts := Options{ - SpecPath: specPath, - OutputDir: outputDir, - PackageName: "TestClient", - Version: "1.0.0", - } - - ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) - defer cancel() - - err := g.Generate(ctx, opts) - if err != nil { - t.Fatalf("Generate failed: %v", err) - } - - // Verify output directory was created - if _, err := os.Stat(outputDir); os.IsNotExist(err) { - t.Error("output directory was not created") - } -} diff --git a/pkg/sdk/generators/python.go b/pkg/sdk/generators/python.go deleted file mode 100644 index bd5f91f..0000000 --- a/pkg/sdk/generators/python.go +++ /dev/null @@ -1,80 +0,0 @@ -package generators - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" -) - -// PythonGenerator generates Python SDKs from OpenAPI specs. -type PythonGenerator struct{} - -// NewPythonGenerator creates a new Python generator. 
-func NewPythonGenerator() *PythonGenerator { - return &PythonGenerator{} -} - -// Language returns the generator's target language identifier. -func (g *PythonGenerator) Language() string { - return "python" -} - -// Available checks if generator dependencies are installed. -func (g *PythonGenerator) Available() bool { - _, err := exec.LookPath("openapi-python-client") - return err == nil -} - -// Install returns instructions for installing the generator. -func (g *PythonGenerator) Install() string { - return "pip install openapi-python-client" -} - -// Generate creates SDK from OpenAPI spec. -func (g *PythonGenerator) Generate(ctx context.Context, opts Options) error { - if err := os.MkdirAll(opts.OutputDir, 0755); err != nil { - return fmt.Errorf("python.Generate: failed to create output dir: %w", err) - } - - if g.Available() { - return g.generateNative(ctx, opts) - } - return g.generateDocker(ctx, opts) -} - -func (g *PythonGenerator) generateNative(ctx context.Context, opts Options) error { - parentDir := filepath.Dir(opts.OutputDir) - - cmd := exec.CommandContext(ctx, "openapi-python-client", "generate", - "--path", opts.SpecPath, - "--output-path", opts.OutputDir, - ) - cmd.Dir = parentDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - return cmd.Run() -} - -func (g *PythonGenerator) generateDocker(ctx context.Context, opts Options) error { - specDir := filepath.Dir(opts.SpecPath) - specName := filepath.Base(opts.SpecPath) - - args := []string{"run", "--rm"} - args = append(args, dockerUserArgs()...) - args = append(args, - "-v", specDir+":/spec", - "-v", opts.OutputDir+":/out", - "openapitools/openapi-generator-cli", "generate", - "-i", "/spec/"+specName, - "-g", "python", - "-o", "/out", - "--additional-properties=packageName="+opts.PackageName, - ) - - cmd := exec.CommandContext(ctx, "docker", args...) 
- cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - return cmd.Run() -} diff --git a/pkg/sdk/generators/python_test.go b/pkg/sdk/generators/python_test.go deleted file mode 100644 index 5b03a76..0000000 --- a/pkg/sdk/generators/python_test.go +++ /dev/null @@ -1,58 +0,0 @@ -package generators - -import ( - "context" - "os" - "path/filepath" - "testing" - "time" -) - -func TestPythonGenerator_Good_Available(t *testing.T) { - g := NewPythonGenerator() - - // These should not panic - lang := g.Language() - if lang != "python" { - t.Errorf("expected language 'python', got '%s'", lang) - } - - _ = g.Available() - - install := g.Install() - if install == "" { - t.Error("expected non-empty install instructions") - } -} - -func TestPythonGenerator_Good_Generate(t *testing.T) { - g := NewPythonGenerator() - if !g.Available() && !dockerAvailable() { - t.Skip("no Python generator available (neither native nor docker)") - } - - // Create temp directories - tmpDir := t.TempDir() - specPath := createTestSpec(t, tmpDir) - outputDir := filepath.Join(tmpDir, "output") - - opts := Options{ - SpecPath: specPath, - OutputDir: outputDir, - PackageName: "testclient", - Version: "1.0.0", - } - - ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) - defer cancel() - - err := g.Generate(ctx, opts) - if err != nil { - t.Fatalf("Generate failed: %v", err) - } - - // Verify output directory was created - if _, err := os.Stat(outputDir); os.IsNotExist(err) { - t.Error("output directory was not created") - } -} diff --git a/pkg/sdk/generators/typescript.go b/pkg/sdk/generators/typescript.go deleted file mode 100644 index c88b9b6..0000000 --- a/pkg/sdk/generators/typescript.go +++ /dev/null @@ -1,110 +0,0 @@ -package generators - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" -) - -// TypeScriptGenerator generates TypeScript SDKs from OpenAPI specs. -type TypeScriptGenerator struct{} - -// NewTypeScriptGenerator creates a new TypeScript generator. 
-func NewTypeScriptGenerator() *TypeScriptGenerator { - return &TypeScriptGenerator{} -} - -// Language returns the generator's target language identifier. -func (g *TypeScriptGenerator) Language() string { - return "typescript" -} - -// Available checks if generator dependencies are installed. -func (g *TypeScriptGenerator) Available() bool { - _, err := exec.LookPath("openapi-typescript-codegen") - if err == nil { - return true - } - _, err = exec.LookPath("npx") - return err == nil -} - -// Install returns instructions for installing the generator. -func (g *TypeScriptGenerator) Install() string { - return "npm install -g openapi-typescript-codegen" -} - -// Generate creates SDK from OpenAPI spec. -func (g *TypeScriptGenerator) Generate(ctx context.Context, opts Options) error { - if err := os.MkdirAll(opts.OutputDir, 0755); err != nil { - return fmt.Errorf("typescript.Generate: failed to create output dir: %w", err) - } - - if g.nativeAvailable() { - return g.generateNative(ctx, opts) - } - if g.npxAvailable() { - return g.generateNpx(ctx, opts) - } - return g.generateDocker(ctx, opts) -} - -func (g *TypeScriptGenerator) nativeAvailable() bool { - _, err := exec.LookPath("openapi-typescript-codegen") - return err == nil -} - -func (g *TypeScriptGenerator) npxAvailable() bool { - _, err := exec.LookPath("npx") - return err == nil -} - -func (g *TypeScriptGenerator) generateNative(ctx context.Context, opts Options) error { - cmd := exec.CommandContext(ctx, "openapi-typescript-codegen", - "--input", opts.SpecPath, - "--output", opts.OutputDir, - "--name", opts.PackageName, - ) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - return cmd.Run() -} - -func (g *TypeScriptGenerator) generateNpx(ctx context.Context, opts Options) error { - cmd := exec.CommandContext(ctx, "npx", "openapi-typescript-codegen", - "--input", opts.SpecPath, - "--output", opts.OutputDir, - "--name", opts.PackageName, - ) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - return cmd.Run() -} 
- -func (g *TypeScriptGenerator) generateDocker(ctx context.Context, opts Options) error { - specDir := filepath.Dir(opts.SpecPath) - specName := filepath.Base(opts.SpecPath) - - args := []string{"run", "--rm"} - args = append(args, dockerUserArgs()...) - args = append(args, - "-v", specDir+":/spec", - "-v", opts.OutputDir+":/out", - "openapitools/openapi-generator-cli", "generate", - "-i", "/spec/"+specName, - "-g", "typescript-fetch", - "-o", "/out", - "--additional-properties=npmName="+opts.PackageName, - ) - - cmd := exec.CommandContext(ctx, "docker", args...) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - if err := cmd.Run(); err != nil { - return fmt.Errorf("typescript.generateDocker: %w", err) - } - return nil -} diff --git a/pkg/sdk/generators/typescript_test.go b/pkg/sdk/generators/typescript_test.go deleted file mode 100644 index 3a40443..0000000 --- a/pkg/sdk/generators/typescript_test.go +++ /dev/null @@ -1,87 +0,0 @@ -package generators - -import ( - "context" - "os" - "os/exec" - "path/filepath" - "testing" - "time" -) - -// dockerAvailable checks if docker is available for fallback generation. -func dockerAvailable() bool { - _, err := exec.LookPath("docker") - return err == nil -} - -// createTestSpec creates a minimal OpenAPI spec for testing. 
-func createTestSpec(t *testing.T, dir string) string { - t.Helper() - spec := `openapi: "3.0.0" -info: - title: Test API - version: "1.0.0" -paths: - /health: - get: - summary: Health check - responses: - "200": - description: OK -` - specPath := filepath.Join(dir, "openapi.yaml") - if err := os.WriteFile(specPath, []byte(spec), 0644); err != nil { - t.Fatalf("failed to write test spec: %v", err) - } - return specPath -} - -func TestTypeScriptGenerator_Good_Available(t *testing.T) { - g := NewTypeScriptGenerator() - - // These should not panic - lang := g.Language() - if lang != "typescript" { - t.Errorf("expected language 'typescript', got '%s'", lang) - } - - _ = g.Available() - - install := g.Install() - if install == "" { - t.Error("expected non-empty install instructions") - } -} - -func TestTypeScriptGenerator_Good_Generate(t *testing.T) { - g := NewTypeScriptGenerator() - if !g.Available() && !dockerAvailable() { - t.Skip("no TypeScript generator available (neither native nor docker)") - } - - // Create temp directories - tmpDir := t.TempDir() - specPath := createTestSpec(t, tmpDir) - outputDir := filepath.Join(tmpDir, "output") - - opts := Options{ - SpecPath: specPath, - OutputDir: outputDir, - PackageName: "testclient", - Version: "1.0.0", - } - - ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) - defer cancel() - - err := g.Generate(ctx, opts) - if err != nil { - t.Fatalf("Generate failed: %v", err) - } - - // Verify output directory was created - if _, err := os.Stat(outputDir); os.IsNotExist(err) { - t.Error("output directory was not created") - } -} diff --git a/pkg/sdk/sdk.go b/pkg/sdk/sdk.go deleted file mode 100644 index 1ed43fc..0000000 --- a/pkg/sdk/sdk.go +++ /dev/null @@ -1,141 +0,0 @@ -// Package sdk provides OpenAPI SDK generation and diff capabilities. 
-package sdk - -import ( - "context" - "fmt" - "path/filepath" - - "github.com/host-uk/core/pkg/sdk/generators" -) - -// Config holds SDK generation configuration from .core/release.yaml. -type Config struct { - // Spec is the path to the OpenAPI spec file (auto-detected if empty). - Spec string `yaml:"spec,omitempty"` - // Languages to generate SDKs for. - Languages []string `yaml:"languages,omitempty"` - // Output directory (default: sdk/). - Output string `yaml:"output,omitempty"` - // Package naming configuration. - Package PackageConfig `yaml:"package,omitempty"` - // Diff configuration for breaking change detection. - Diff DiffConfig `yaml:"diff,omitempty"` - // Publish configuration for monorepo publishing. - Publish PublishConfig `yaml:"publish,omitempty"` -} - -// PackageConfig holds package naming configuration. -type PackageConfig struct { - // Name is the base package name. - Name string `yaml:"name,omitempty"` - // Version is the SDK version (supports templates like {{.Version}}). - Version string `yaml:"version,omitempty"` -} - -// DiffConfig holds breaking change detection configuration. -type DiffConfig struct { - // Enabled determines whether to run diff checks. - Enabled bool `yaml:"enabled,omitempty"` - // FailOnBreaking fails the release if breaking changes are detected. - FailOnBreaking bool `yaml:"fail_on_breaking,omitempty"` -} - -// PublishConfig holds monorepo publishing configuration. -type PublishConfig struct { - // Repo is the SDK monorepo (e.g., "myorg/sdks"). - Repo string `yaml:"repo,omitempty"` - // Path is the subdirectory for this SDK (e.g., "packages/myapi"). - Path string `yaml:"path,omitempty"` -} - -// SDK orchestrates OpenAPI SDK generation. -type SDK struct { - config *Config - projectDir string - version string -} - -// New creates a new SDK instance. 
-func New(projectDir string, config *Config) *SDK { - if config == nil { - config = DefaultConfig() - } - return &SDK{ - config: config, - projectDir: projectDir, - } -} - -// SetVersion sets the SDK version for generation. -// This updates both the internal version field and the config's Package.Version. -func (s *SDK) SetVersion(version string) { - s.version = version - if s.config != nil { - s.config.Package.Version = version - } -} - -// DefaultConfig returns sensible defaults for SDK configuration. -func DefaultConfig() *Config { - return &Config{ - Languages: []string{"typescript", "python", "go", "php"}, - Output: "sdk", - Diff: DiffConfig{ - Enabled: true, - FailOnBreaking: false, - }, - } -} - -// Generate generates SDKs for all configured languages. -func (s *SDK) Generate(ctx context.Context) error { - // Generate for each language - for _, lang := range s.config.Languages { - if err := s.GenerateLanguage(ctx, lang); err != nil { - return err - } - } - - return nil -} - -// GenerateLanguage generates SDK for a specific language. -func (s *SDK) GenerateLanguage(ctx context.Context, lang string) error { - specPath, err := s.DetectSpec() - if err != nil { - return err - } - - registry := generators.NewRegistry() - registry.Register(generators.NewTypeScriptGenerator()) - registry.Register(generators.NewPythonGenerator()) - registry.Register(generators.NewGoGenerator()) - registry.Register(generators.NewPHPGenerator()) - - gen, ok := registry.Get(lang) - if !ok { - return fmt.Errorf("sdk.GenerateLanguage: unknown language: %s", lang) - } - - if !gen.Available() { - fmt.Printf("Warning: %s generator not available. 
Install with: %s\n", lang, gen.Install()) - fmt.Printf("Falling back to Docker...\n") - } - - outputDir := filepath.Join(s.projectDir, s.config.Output, lang) - opts := generators.Options{ - SpecPath: specPath, - OutputDir: outputDir, - PackageName: s.config.Package.Name, - Version: s.config.Package.Version, - } - - fmt.Printf("Generating %s SDK...\n", lang) - if err := gen.Generate(ctx, opts); err != nil { - return fmt.Errorf("sdk.GenerateLanguage: %s generation failed: %w", lang, err) - } - fmt.Printf("Generated %s SDK at %s\n", lang, outputDir) - - return nil -} diff --git a/pkg/sdk/sdk_test.go b/pkg/sdk/sdk_test.go deleted file mode 100644 index 02b3db7..0000000 --- a/pkg/sdk/sdk_test.go +++ /dev/null @@ -1,79 +0,0 @@ -package sdk - -import ( - "context" - "os" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestSDK_Good_SetVersion(t *testing.T) { - s := New("/tmp", nil) - s.SetVersion("v1.2.3") - - assert.Equal(t, "v1.2.3", s.version) -} - -func TestSDK_Good_VersionPassedToGenerator(t *testing.T) { - config := &Config{ - Languages: []string{"typescript"}, - Output: "sdk", - Package: PackageConfig{ - Name: "test-sdk", - }, - } - s := New("/tmp", config) - s.SetVersion("v2.0.0") - - assert.Equal(t, "v2.0.0", s.config.Package.Version) -} - -func TestDefaultConfig(t *testing.T) { - cfg := DefaultConfig() - assert.Contains(t, cfg.Languages, "typescript") - assert.Equal(t, "sdk", cfg.Output) - assert.True(t, cfg.Diff.Enabled) -} - -func TestSDK_New(t *testing.T) { - t.Run("with nil config", func(t *testing.T) { - s := New("/tmp", nil) - assert.NotNil(t, s.config) - assert.Equal(t, "sdk", s.config.Output) - }) - - t.Run("with custom config", func(t *testing.T) { - cfg := &Config{Output: "custom"} - s := New("/tmp", cfg) - assert.Equal(t, "custom", s.config.Output) - }) -} - -func TestSDK_GenerateLanguage_Bad(t *testing.T) { - - t.Run("unknown language", func(t *testing.T) { - - tmpDir := 
t.TempDir() - - specPath := filepath.Join(tmpDir, "openapi.yaml") - - err := os.WriteFile(specPath, []byte("openapi: 3.0.0"), 0644) - - require.NoError(t, err) - - - - s := New(tmpDir, nil) - - err = s.GenerateLanguage(context.Background(), "invalid-lang") - - assert.Error(t, err) - - assert.Contains(t, err.Error(), "unknown language") - - }) - -} diff --git a/pkg/security/cmd.go b/pkg/security/cmd.go deleted file mode 100644 index 23b13f4..0000000 --- a/pkg/security/cmd.go +++ /dev/null @@ -1,7 +0,0 @@ -package security - -import "github.com/host-uk/core/pkg/cli" - -func init() { - cli.RegisterCommands(AddSecurityCommands) -} diff --git a/pkg/security/cmd_alerts.go b/pkg/security/cmd_alerts.go deleted file mode 100644 index 62b205d..0000000 --- a/pkg/security/cmd_alerts.go +++ /dev/null @@ -1,216 +0,0 @@ -package security - -import ( - "encoding/json" - "fmt" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -func addAlertsCommand(parent *cli.Command) { - cmd := &cli.Command{ - Use: "alerts", - Short: i18n.T("cmd.security.alerts.short"), - Long: i18n.T("cmd.security.alerts.long"), - RunE: func(c *cli.Command, args []string) error { - return runAlerts() - }, - } - - cmd.Flags().StringVar(&securityRegistryPath, "registry", "", i18n.T("common.flag.registry")) - cmd.Flags().StringVar(&securityRepo, "repo", "", i18n.T("cmd.security.flag.repo")) - cmd.Flags().StringVar(&securitySeverity, "severity", "", i18n.T("cmd.security.flag.severity")) - cmd.Flags().BoolVar(&securityJSON, "json", false, i18n.T("common.flag.json")) - - parent.AddCommand(cmd) -} - -// AlertOutput represents a unified alert for output. 
-type AlertOutput struct { - Repo string `json:"repo"` - Severity string `json:"severity"` - ID string `json:"id"` - Package string `json:"package,omitempty"` - Version string `json:"version,omitempty"` - Location string `json:"location,omitempty"` - Type string `json:"type"` - Message string `json:"message"` -} - -func runAlerts() error { - if err := checkGH(); err != nil { - return err - } - - reg, err := loadRegistry(securityRegistryPath) - if err != nil { - return err - } - - repoList := getReposToCheck(reg, securityRepo) - if len(repoList) == 0 { - return cli.Err("repo not found: %s", securityRepo) - } - - var allAlerts []AlertOutput - summary := &AlertSummary{} - - for _, repo := range repoList { - repoFullName := fmt.Sprintf("%s/%s", reg.Org, repo.Name) - - // Fetch Dependabot alerts - depAlerts, err := fetchDependabotAlerts(repoFullName) - if err == nil { - for _, alert := range depAlerts { - if alert.State != "open" { - continue - } - severity := alert.Advisory.Severity - if !filterBySeverity(severity, securitySeverity) { - continue - } - summary.Add(severity) - allAlerts = append(allAlerts, AlertOutput{ - Repo: repo.Name, - Severity: severity, - ID: alert.Advisory.CVEID, - Package: alert.Dependency.Package.Name, - Version: alert.SecurityVulnerability.VulnerableVersionRange, - Type: "dependabot", - Message: alert.Advisory.Summary, - }) - } - } - - // Fetch code scanning alerts - codeAlerts, err := fetchCodeScanningAlerts(repoFullName) - if err == nil { - for _, alert := range codeAlerts { - if alert.State != "open" { - continue - } - severity := alert.Rule.Severity - if !filterBySeverity(severity, securitySeverity) { - continue - } - summary.Add(severity) - location := fmt.Sprintf("%s:%d", alert.MostRecentInstance.Location.Path, alert.MostRecentInstance.Location.StartLine) - allAlerts = append(allAlerts, AlertOutput{ - Repo: repo.Name, - Severity: severity, - ID: alert.Rule.ID, - Location: location, - Type: alert.Tool.Name, - Message: 
alert.Rule.Description, - }) - } - } - - // Fetch secret scanning alerts - secretAlerts, err := fetchSecretScanningAlerts(repoFullName) - if err == nil { - for _, alert := range secretAlerts { - if alert.State != "open" { - continue - } - if !filterBySeverity("high", securitySeverity) { - continue - } - summary.Add("high") // Secrets are always high severity - allAlerts = append(allAlerts, AlertOutput{ - Repo: repo.Name, - Severity: "high", - ID: fmt.Sprintf("secret-%d", alert.Number), - Type: "secret-scanning", - Message: alert.SecretType, - }) - } - } - } - - if securityJSON { - output, err := json.MarshalIndent(allAlerts, "", " ") - if err != nil { - return cli.Wrap(err, "marshal JSON output") - } - cli.Text(string(output)) - return nil - } - - // Print summary - cli.Blank() - cli.Print("%s %s\n", cli.DimStyle.Render("Alerts:"), summary.String()) - cli.Blank() - - if len(allAlerts) == 0 { - return nil - } - - // Print table - for _, alert := range allAlerts { - sevStyle := severityStyle(alert.Severity) - - // Format: repo SEVERITY ID package/location type - location := alert.Package - if location == "" { - location = alert.Location - } - if alert.Version != "" { - location = fmt.Sprintf("%s %s", location, cli.DimStyle.Render(alert.Version)) - } - - cli.Print("%-20s %s %-16s %-40s %s\n", - cli.ValueStyle.Render(alert.Repo), - sevStyle.Render(fmt.Sprintf("%-8s", alert.Severity)), - alert.ID, - location, - cli.DimStyle.Render(alert.Type), - ) - } - cli.Blank() - - return nil -} - -func fetchDependabotAlerts(repoFullName string) ([]DependabotAlert, error) { - endpoint := fmt.Sprintf("repos/%s/dependabot/alerts?state=open", repoFullName) - output, err := runGHAPI(endpoint) - if err != nil { - return nil, cli.Wrap(err, fmt.Sprintf("fetch dependabot alerts for %s", repoFullName)) - } - - var alerts []DependabotAlert - if err := json.Unmarshal(output, &alerts); err != nil { - return nil, cli.Wrap(err, fmt.Sprintf("parse dependabot alerts for %s", repoFullName)) - } - 
return alerts, nil -} - -func fetchCodeScanningAlerts(repoFullName string) ([]CodeScanningAlert, error) { - endpoint := fmt.Sprintf("repos/%s/code-scanning/alerts?state=open", repoFullName) - output, err := runGHAPI(endpoint) - if err != nil { - return nil, cli.Wrap(err, fmt.Sprintf("fetch code-scanning alerts for %s", repoFullName)) - } - - var alerts []CodeScanningAlert - if err := json.Unmarshal(output, &alerts); err != nil { - return nil, cli.Wrap(err, fmt.Sprintf("parse code-scanning alerts for %s", repoFullName)) - } - return alerts, nil -} - -func fetchSecretScanningAlerts(repoFullName string) ([]SecretScanningAlert, error) { - endpoint := fmt.Sprintf("repos/%s/secret-scanning/alerts?state=open", repoFullName) - output, err := runGHAPI(endpoint) - if err != nil { - return nil, cli.Wrap(err, fmt.Sprintf("fetch secret-scanning alerts for %s", repoFullName)) - } - - var alerts []SecretScanningAlert - if err := json.Unmarshal(output, &alerts); err != nil { - return nil, cli.Wrap(err, fmt.Sprintf("parse secret-scanning alerts for %s", repoFullName)) - } - return alerts, nil -} diff --git a/pkg/security/cmd_deps.go b/pkg/security/cmd_deps.go deleted file mode 100644 index 0d13a93..0000000 --- a/pkg/security/cmd_deps.go +++ /dev/null @@ -1,134 +0,0 @@ -package security - -import ( - "encoding/json" - "fmt" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -func addDepsCommand(parent *cli.Command) { - cmd := &cli.Command{ - Use: "deps", - Short: i18n.T("cmd.security.deps.short"), - Long: i18n.T("cmd.security.deps.long"), - RunE: func(c *cli.Command, args []string) error { - return runDeps() - }, - } - - cmd.Flags().StringVar(&securityRegistryPath, "registry", "", i18n.T("common.flag.registry")) - cmd.Flags().StringVar(&securityRepo, "repo", "", i18n.T("cmd.security.flag.repo")) - cmd.Flags().StringVar(&securitySeverity, "severity", "", i18n.T("cmd.security.flag.severity")) - cmd.Flags().BoolVar(&securityJSON, "json", false, 
i18n.T("common.flag.json")) - - parent.AddCommand(cmd) -} - -// DepAlert represents a dependency vulnerability for output. -type DepAlert struct { - Repo string `json:"repo"` - Severity string `json:"severity"` - CVE string `json:"cve"` - Package string `json:"package"` - Ecosystem string `json:"ecosystem"` - Vulnerable string `json:"vulnerable_range"` - PatchedVersion string `json:"patched_version,omitempty"` - Manifest string `json:"manifest"` - Summary string `json:"summary"` -} - -func runDeps() error { - if err := checkGH(); err != nil { - return err - } - - reg, err := loadRegistry(securityRegistryPath) - if err != nil { - return err - } - - repoList := getReposToCheck(reg, securityRepo) - if len(repoList) == 0 { - return cli.Err("repo not found: %s", securityRepo) - } - - var allAlerts []DepAlert - summary := &AlertSummary{} - - for _, repo := range repoList { - repoFullName := fmt.Sprintf("%s/%s", reg.Org, repo.Name) - - alerts, err := fetchDependabotAlerts(repoFullName) - if err != nil { - continue - } - - for _, alert := range alerts { - if alert.State != "open" { - continue - } - - severity := alert.Advisory.Severity - if !filterBySeverity(severity, securitySeverity) { - continue - } - - summary.Add(severity) - - depAlert := DepAlert{ - Repo: repo.Name, - Severity: severity, - CVE: alert.Advisory.CVEID, - Package: alert.Dependency.Package.Name, - Ecosystem: alert.Dependency.Package.Ecosystem, - Vulnerable: alert.SecurityVulnerability.VulnerableVersionRange, - PatchedVersion: alert.SecurityVulnerability.FirstPatchedVersion.Identifier, - Manifest: alert.Dependency.ManifestPath, - Summary: alert.Advisory.Summary, - } - allAlerts = append(allAlerts, depAlert) - } - } - - if securityJSON { - output, err := json.MarshalIndent(allAlerts, "", " ") - if err != nil { - return cli.Wrap(err, "marshal JSON output") - } - cli.Text(string(output)) - return nil - } - - // Print summary - cli.Blank() - cli.Print("%s %s\n", cli.DimStyle.Render("Dependabot:"), 
summary.String()) - cli.Blank() - - if len(allAlerts) == 0 { - return nil - } - - // Print table - for _, alert := range allAlerts { - sevStyle := severityStyle(alert.Severity) - - // Format upgrade suggestion - upgrade := alert.Vulnerable - if alert.PatchedVersion != "" { - upgrade = fmt.Sprintf("%s -> %s", alert.Vulnerable, cli.SuccessStyle.Render(alert.PatchedVersion)) - } - - cli.Print("%-16s %s %-16s %-30s %s\n", - cli.ValueStyle.Render(alert.Repo), - sevStyle.Render(fmt.Sprintf("%-8s", alert.Severity)), - alert.CVE, - alert.Package, - upgrade, - ) - } - cli.Blank() - - return nil -} diff --git a/pkg/security/cmd_scan.go b/pkg/security/cmd_scan.go deleted file mode 100644 index a11e2ad..0000000 --- a/pkg/security/cmd_scan.go +++ /dev/null @@ -1,142 +0,0 @@ -package security - -import ( - "encoding/json" - "fmt" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -var ( - scanTool string -) - -func addScanCommand(parent *cli.Command) { - cmd := &cli.Command{ - Use: "scan", - Short: i18n.T("cmd.security.scan.short"), - Long: i18n.T("cmd.security.scan.long"), - RunE: func(c *cli.Command, args []string) error { - return runScan() - }, - } - - cmd.Flags().StringVar(&securityRegistryPath, "registry", "", i18n.T("common.flag.registry")) - cmd.Flags().StringVar(&securityRepo, "repo", "", i18n.T("cmd.security.flag.repo")) - cmd.Flags().StringVar(&securitySeverity, "severity", "", i18n.T("cmd.security.flag.severity")) - cmd.Flags().StringVar(&scanTool, "tool", "", i18n.T("cmd.security.scan.flag.tool")) - cmd.Flags().BoolVar(&securityJSON, "json", false, i18n.T("common.flag.json")) - - parent.AddCommand(cmd) -} - -// ScanAlert represents a code scanning alert for output. 
-type ScanAlert struct { - Repo string `json:"repo"` - Severity string `json:"severity"` - RuleID string `json:"rule_id"` - Tool string `json:"tool"` - Path string `json:"path"` - Line int `json:"line"` - Description string `json:"description"` - Message string `json:"message"` -} - -func runScan() error { - if err := checkGH(); err != nil { - return err - } - - reg, err := loadRegistry(securityRegistryPath) - if err != nil { - return err - } - - repoList := getReposToCheck(reg, securityRepo) - if len(repoList) == 0 { - return cli.Err("repo not found: %s", securityRepo) - } - - var allAlerts []ScanAlert - summary := &AlertSummary{} - - for _, repo := range repoList { - repoFullName := fmt.Sprintf("%s/%s", reg.Org, repo.Name) - - alerts, err := fetchCodeScanningAlerts(repoFullName) - if err != nil { - continue - } - - for _, alert := range alerts { - if alert.State != "open" { - continue - } - - // Filter by tool if specified - if scanTool != "" && alert.Tool.Name != scanTool { - continue - } - - severity := alert.Rule.Severity - if severity == "" { - severity = "medium" // Default if not specified - } - - if !filterBySeverity(severity, securitySeverity) { - continue - } - - summary.Add(severity) - - scanAlert := ScanAlert{ - Repo: repo.Name, - Severity: severity, - RuleID: alert.Rule.ID, - Tool: alert.Tool.Name, - Path: alert.MostRecentInstance.Location.Path, - Line: alert.MostRecentInstance.Location.StartLine, - Description: alert.Rule.Description, - Message: alert.MostRecentInstance.Message.Text, - } - allAlerts = append(allAlerts, scanAlert) - } - } - - if securityJSON { - output, err := json.MarshalIndent(allAlerts, "", " ") - if err != nil { - return cli.Wrap(err, "marshal JSON output") - } - cli.Text(string(output)) - return nil - } - - // Print summary - cli.Blank() - cli.Print("%s %s\n", cli.DimStyle.Render("Code Scanning:"), summary.String()) - cli.Blank() - - if len(allAlerts) == 0 { - return nil - } - - // Print table - for _, alert := range allAlerts { 
- sevStyle := severityStyle(alert.Severity) - - location := fmt.Sprintf("%s:%d", alert.Path, alert.Line) - - cli.Print("%-16s %s %-20s %-40s %s\n", - cli.ValueStyle.Render(alert.Repo), - sevStyle.Render(fmt.Sprintf("%-8s", alert.Severity)), - alert.RuleID, - location, - cli.DimStyle.Render(alert.Tool), - ) - } - cli.Blank() - - return nil -} diff --git a/pkg/security/cmd_secrets.go b/pkg/security/cmd_secrets.go deleted file mode 100644 index 87549db..0000000 --- a/pkg/security/cmd_secrets.go +++ /dev/null @@ -1,121 +0,0 @@ -package security - -import ( - "encoding/json" - "fmt" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -func addSecretsCommand(parent *cli.Command) { - cmd := &cli.Command{ - Use: "secrets", - Short: i18n.T("cmd.security.secrets.short"), - Long: i18n.T("cmd.security.secrets.long"), - RunE: func(c *cli.Command, args []string) error { - return runSecrets() - }, - } - - cmd.Flags().StringVar(&securityRegistryPath, "registry", "", i18n.T("common.flag.registry")) - cmd.Flags().StringVar(&securityRepo, "repo", "", i18n.T("cmd.security.flag.repo")) - cmd.Flags().BoolVar(&securityJSON, "json", false, i18n.T("common.flag.json")) - - parent.AddCommand(cmd) -} - -// SecretAlert represents a secret scanning alert for output. 
-type SecretAlert struct { - Repo string `json:"repo"` - Number int `json:"number"` - SecretType string `json:"secret_type"` - State string `json:"state"` - Resolution string `json:"resolution,omitempty"` - PushProtection bool `json:"push_protection_bypassed"` -} - -func runSecrets() error { - if err := checkGH(); err != nil { - return err - } - - reg, err := loadRegistry(securityRegistryPath) - if err != nil { - return err - } - - repoList := getReposToCheck(reg, securityRepo) - if len(repoList) == 0 { - return cli.Err("repo not found: %s", securityRepo) - } - - var allAlerts []SecretAlert - openCount := 0 - - for _, repo := range repoList { - repoFullName := fmt.Sprintf("%s/%s", reg.Org, repo.Name) - - alerts, err := fetchSecretScanningAlerts(repoFullName) - if err != nil { - continue - } - - for _, alert := range alerts { - if alert.State != "open" { - continue - } - openCount++ - - secretAlert := SecretAlert{ - Repo: repo.Name, - Number: alert.Number, - SecretType: alert.SecretType, - State: alert.State, - Resolution: alert.Resolution, - PushProtection: alert.PushProtection, - } - allAlerts = append(allAlerts, secretAlert) - } - } - - if securityJSON { - output, err := json.MarshalIndent(allAlerts, "", " ") - if err != nil { - return cli.Wrap(err, "marshal JSON output") - } - cli.Text(string(output)) - return nil - } - - // Print summary - cli.Blank() - if openCount > 0 { - cli.Print("%s %s\n", cli.DimStyle.Render("Secrets:"), cli.ErrorStyle.Render(fmt.Sprintf("%d open", openCount))) - } else { - cli.Print("%s %s\n", cli.DimStyle.Render("Secrets:"), cli.SuccessStyle.Render("No exposed secrets")) - } - cli.Blank() - - if len(allAlerts) == 0 { - return nil - } - - // Print table - for _, alert := range allAlerts { - bypassed := "" - if alert.PushProtection { - bypassed = cli.WarningStyle.Render(" (push protection bypassed)") - } - - cli.Print("%-16s %-6d %-30s%s\n", - cli.ValueStyle.Render(alert.Repo), - alert.Number, - cli.ErrorStyle.Render(alert.SecretType), - 
bypassed, - ) - } - cli.Blank() - - return nil -} diff --git a/pkg/security/cmd_security.go b/pkg/security/cmd_security.go deleted file mode 100644 index 63b3d1b..0000000 --- a/pkg/security/cmd_security.go +++ /dev/null @@ -1,259 +0,0 @@ -package security - -import ( - "fmt" - "os/exec" - "strings" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/repos" -) - -var ( - // Command flags - securityRegistryPath string - securityRepo string - securitySeverity string - securityJSON bool -) - -// AddSecurityCommands adds the 'security' command to the root. -func AddSecurityCommands(root *cli.Command) { - secCmd := &cli.Command{ - Use: "security", - Short: i18n.T("cmd.security.short"), - Long: i18n.T("cmd.security.long"), - } - - addAlertsCommand(secCmd) - addDepsCommand(secCmd) - addScanCommand(secCmd) - addSecretsCommand(secCmd) - - root.AddCommand(secCmd) -} - -// DependabotAlert represents a Dependabot vulnerability alert. -type DependabotAlert struct { - Number int `json:"number"` - State string `json:"state"` - Advisory struct { - Severity string `json:"severity"` - CVEID string `json:"cve_id"` - Summary string `json:"summary"` - Description string `json:"description"` - } `json:"security_advisory"` - Dependency struct { - Package struct { - Name string `json:"name"` - Ecosystem string `json:"ecosystem"` - } `json:"package"` - ManifestPath string `json:"manifest_path"` - } `json:"dependency"` - SecurityVulnerability struct { - Package struct { - Name string `json:"name"` - Ecosystem string `json:"ecosystem"` - } `json:"package"` - FirstPatchedVersion struct { - Identifier string `json:"identifier"` - } `json:"first_patched_version"` - VulnerableVersionRange string `json:"vulnerable_version_range"` - } `json:"security_vulnerability"` -} - -// CodeScanningAlert represents a code scanning alert. 
-type CodeScanningAlert struct { - Number int `json:"number"` - State string `json:"state"` - DismissedReason string `json:"dismissed_reason"` - Rule struct { - ID string `json:"id"` - Severity string `json:"severity"` - Description string `json:"description"` - Tags []string `json:"tags"` - } `json:"rule"` - Tool struct { - Name string `json:"name"` - Version string `json:"version"` - } `json:"tool"` - MostRecentInstance struct { - Location struct { - Path string `json:"path"` - StartLine int `json:"start_line"` - EndLine int `json:"end_line"` - } `json:"location"` - Message struct { - Text string `json:"text"` - } `json:"message"` - } `json:"most_recent_instance"` -} - -// SecretScanningAlert represents a secret scanning alert. -type SecretScanningAlert struct { - Number int `json:"number"` - State string `json:"state"` - SecretType string `json:"secret_type"` - Secret string `json:"secret"` - PushProtection bool `json:"push_protection_bypassed"` - Resolution string `json:"resolution"` -} - -// loadRegistry loads the repository registry. -func loadRegistry(registryPath string) (*repos.Registry, error) { - if registryPath != "" { - reg, err := repos.LoadRegistry(registryPath) - if err != nil { - return nil, cli.Wrap(err, "load registry") - } - return reg, nil - } - - path, err := repos.FindRegistry() - if err != nil { - return nil, cli.Wrap(err, "find registry") - } - reg, err := repos.LoadRegistry(path) - if err != nil { - return nil, cli.Wrap(err, "load registry") - } - return reg, nil -} - -// checkGH verifies gh CLI is available. -func checkGH() error { - if _, err := exec.LookPath("gh"); err != nil { - return fmt.Errorf(i18n.T("error.gh_not_found")) - } - return nil -} - -// runGHAPI runs a gh api command and returns the output. 
-func runGHAPI(endpoint string) ([]byte, error) { - cmd := exec.Command("gh", "api", endpoint, "--paginate") - output, err := cmd.Output() - if err != nil { - if exitErr, ok := err.(*exec.ExitError); ok { - stderr := string(exitErr.Stderr) - // Handle common errors gracefully - if strings.Contains(stderr, "404") || strings.Contains(stderr, "Not Found") { - return []byte("[]"), nil // Return empty array for not found - } - if strings.Contains(stderr, "403") { - return nil, fmt.Errorf("access denied (check token permissions)") - } - } - return nil, cli.Wrap(err, "run gh api") - } - return output, nil -} - -// severityRank returns a numeric rank for severity (higher = more severe). -func severityRank(severity string) int { - switch strings.ToLower(severity) { - case "critical": - return 4 - case "high": - return 3 - case "medium": - return 2 - case "low": - return 1 - default: - return 0 - } -} - -// severityStyle returns the appropriate style for a severity level. -func severityStyle(severity string) *cli.AnsiStyle { - switch strings.ToLower(severity) { - case "critical": - return cli.ErrorStyle - case "high": - return cli.WarningStyle - case "medium": - return cli.ValueStyle - default: - return cli.DimStyle - } -} - -// filterBySeverity checks if the severity matches the filter. -func filterBySeverity(severity, filter string) bool { - if filter == "" { - return true - } - - severities := strings.Split(strings.ToLower(filter), ",") - sev := strings.ToLower(severity) - - for _, s := range severities { - if strings.TrimSpace(s) == sev { - return true - } - } - return false -} - -// getReposToCheck returns the list of repos to check based on flags. -func getReposToCheck(reg *repos.Registry, repoFilter string) []*repos.Repo { - if repoFilter != "" { - if repo, ok := reg.Get(repoFilter); ok { - return []*repos.Repo{repo} - } - return nil - } - return reg.List() -} - -// AlertSummary holds aggregated alert counts. 
-type AlertSummary struct { - Critical int - High int - Medium int - Low int - Unknown int - Total int -} - -// Add increments summary counters for the provided severity. -func (s *AlertSummary) Add(severity string) { - s.Total++ - switch strings.ToLower(severity) { - case "critical": - s.Critical++ - case "high": - s.High++ - case "medium": - s.Medium++ - case "low": - s.Low++ - default: - s.Unknown++ - } -} - -// String renders a human-readable summary of alert counts. -func (s *AlertSummary) String() string { - parts := []string{} - if s.Critical > 0 { - parts = append(parts, cli.ErrorStyle.Render(fmt.Sprintf("%d critical", s.Critical))) - } - if s.High > 0 { - parts = append(parts, cli.WarningStyle.Render(fmt.Sprintf("%d high", s.High))) - } - if s.Medium > 0 { - parts = append(parts, cli.ValueStyle.Render(fmt.Sprintf("%d medium", s.Medium))) - } - if s.Low > 0 { - parts = append(parts, cli.DimStyle.Render(fmt.Sprintf("%d low", s.Low))) - } - if s.Unknown > 0 { - parts = append(parts, cli.DimStyle.Render(fmt.Sprintf("%d unknown", s.Unknown))) - } - if len(parts) == 0 { - return cli.SuccessStyle.Render("No alerts") - } - return strings.Join(parts, " | ") -} diff --git a/pkg/setup/cmd_bootstrap.go b/pkg/setup/cmd_bootstrap.go deleted file mode 100644 index d6e6dfb..0000000 --- a/pkg/setup/cmd_bootstrap.go +++ /dev/null @@ -1,175 +0,0 @@ -// cmd_bootstrap.go implements bootstrap mode for new workspaces. -// -// Bootstrap mode is activated when no repos.yaml exists in the current -// directory or any parent. It clones core-devops first, then uses its -// repos.yaml to present the package wizard. - -package setup - -import ( - "context" - "fmt" - "os" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/repos" - "github.com/host-uk/core/pkg/workspace" -) - -// runSetupOrchestrator decides between registry mode and bootstrap mode. 
-func runSetupOrchestrator(registryPath, only string, dryRun, all bool, projectName string, runBuild bool) error { - ctx := context.Background() - - // Try to find an existing registry - var foundRegistry string - var err error - - if registryPath != "" { - foundRegistry = registryPath - } else { - foundRegistry, err = repos.FindRegistry() - } - - // If registry exists, use registry mode - if err == nil && foundRegistry != "" { - return runRegistrySetup(ctx, foundRegistry, only, dryRun, all, runBuild) - } - - // No registry found - enter bootstrap mode - return runBootstrap(ctx, only, dryRun, all, projectName, runBuild) -} - -// runBootstrap handles the case where no repos.yaml exists. -func runBootstrap(ctx context.Context, only string, dryRun, all bool, projectName string, runBuild bool) error { - cwd, err := os.Getwd() - if err != nil { - return fmt.Errorf("failed to get working directory: %w", err) - } - - fmt.Printf("%s %s\n", dimStyle.Render(">>"), i18n.T("cmd.setup.bootstrap_mode")) - - var targetDir string - - // Check if current directory is empty - empty, err := isDirEmpty(cwd) - if err != nil { - return fmt.Errorf("failed to check directory: %w", err) - } - - if empty { - // Clone into current directory - targetDir = cwd - fmt.Printf("%s %s\n", dimStyle.Render(">>"), i18n.T("cmd.setup.cloning_current_dir")) - } else { - // Directory has content - check if it's a git repo root - isRepo := isGitRepoRoot(cwd) - - if isRepo && isTerminal() && !all { - // Offer choice: setup working directory or create package - choice, err := promptSetupChoice() - if err != nil { - return fmt.Errorf("failed to get choice: %w", err) - } - - if choice == "setup" { - // Setup this working directory with .core/ config - return runRepoSetup(cwd, dryRun) - } - // Otherwise continue to "create package" flow - } - - // Create package flow - need a project name - if projectName == "" { - if !isTerminal() || all { - projectName = defaultOrg - } else { - projectName, err = 
promptProjectName(defaultOrg) - if err != nil { - return fmt.Errorf("failed to get project name: %w", err) - } - } - } - - targetDir = filepath.Join(cwd, projectName) - fmt.Printf("%s %s: %s\n", dimStyle.Render(">>"), i18n.T("cmd.setup.creating_project_dir"), projectName) - - if !dryRun { - if err := os.MkdirAll(targetDir, 0755); err != nil { - return fmt.Errorf("failed to create directory: %w", err) - } - } - } - - // Clone core-devops first - devopsPath := filepath.Join(targetDir, devopsRepo) - if _, err := os.Stat(filepath.Join(devopsPath, ".git")); os.IsNotExist(err) { - fmt.Printf("%s %s %s...\n", dimStyle.Render(">>"), i18n.T("common.status.cloning"), devopsRepo) - - if !dryRun { - if err := gitClone(ctx, defaultOrg, devopsRepo, devopsPath); err != nil { - return fmt.Errorf("failed to clone %s: %w", devopsRepo, err) - } - fmt.Printf("%s %s %s\n", successStyle.Render(">>"), devopsRepo, i18n.T("cmd.setup.cloned")) - } else { - fmt.Printf(" %s %s/%s to %s\n", i18n.T("cmd.setup.would_clone"), defaultOrg, devopsRepo, devopsPath) - } - } else { - fmt.Printf("%s %s %s\n", dimStyle.Render(">>"), devopsRepo, i18n.T("cmd.setup.already_exists")) - } - - // Load the repos.yaml from core-devops - registryPath := filepath.Join(devopsPath, devopsReposYaml) - - if dryRun { - fmt.Printf("\n%s %s %s\n", dimStyle.Render(">>"), i18n.T("cmd.setup.would_load_registry"), registryPath) - return nil - } - - reg, err := repos.LoadRegistry(registryPath) - if err != nil { - return fmt.Errorf("failed to load registry from %s: %w", devopsRepo, err) - } - - // Override base path to target directory - reg.BasePath = targetDir - - // Check workspace config for default_only if no filter specified - if only == "" { - if wsConfig, err := workspace.LoadConfig(devopsPath); err == nil && wsConfig != nil && len(wsConfig.DefaultOnly) > 0 { - only = strings.Join(wsConfig.DefaultOnly, ",") - } - } - - // Now run the regular setup with the loaded registry - return runRegistrySetupWithReg(ctx, reg, 
registryPath, only, dryRun, all, runBuild) -} - -// isGitRepoRoot returns true if the directory is a git repository root. -func isGitRepoRoot(path string) bool { - _, err := os.Stat(filepath.Join(path, ".git")) - return err == nil -} - -// isDirEmpty returns true if the directory is empty or contains only hidden files. -func isDirEmpty(path string) (bool, error) { - entries, err := os.ReadDir(path) - if err != nil { - return false, err - } - - for _, e := range entries { - name := e.Name() - // Ignore common hidden/metadata files - if name == ".DS_Store" || name == ".git" || name == ".gitignore" { - continue - } - // Any other non-hidden file means directory is not empty - if len(name) > 0 && name[0] != '.' { - return false, nil - } - } - - return true, nil -} diff --git a/pkg/setup/cmd_commands.go b/pkg/setup/cmd_commands.go deleted file mode 100644 index 4bf46a6..0000000 --- a/pkg/setup/cmd_commands.go +++ /dev/null @@ -1,38 +0,0 @@ -// Package setup provides workspace bootstrap and package cloning commands. -// -// Two modes of operation: -// -// REGISTRY MODE (repos.yaml exists): -// - Clones all repositories defined in repos.yaml into packages/ -// - Skips repos that already exist -// - Supports filtering by type with --only -// -// BOOTSTRAP MODE (no repos.yaml): -// - Clones core-devops to set up the workspace foundation -// - Presents an interactive wizard to select packages (unless --all) -// - Clones selected packages -// -// Flags: -// - --registry: Path to repos.yaml (auto-detected if not specified) -// - --only: Filter by repo type (foundation, module, product) -// - --dry-run: Preview what would be cloned -// - --all: Skip wizard, clone all packages (non-interactive) -// - --name: Project directory name for bootstrap mode -// - --build: Run build after cloning -// -// Uses gh CLI with HTTPS when authenticated, falls back to SSH. 
-package setup - -import ( - "github.com/host-uk/core/pkg/cli" - "github.com/spf13/cobra" -) - -func init() { - cli.RegisterCommands(AddSetupCommands) -} - -// AddSetupCommands registers the 'setup' command and all subcommands. -func AddSetupCommands(root *cobra.Command) { - AddSetupCommand(root) -} diff --git a/pkg/setup/cmd_github.go b/pkg/setup/cmd_github.go deleted file mode 100644 index 47a20e0..0000000 --- a/pkg/setup/cmd_github.go +++ /dev/null @@ -1,229 +0,0 @@ -// cmd_github.go implements the 'setup github' command for configuring -// GitHub repositories with organization standards. -// -// Usage: -// core setup github [flags] -// -// Flags: -// -r, --repo string Specific repo to setup -// -a, --all Setup all repos in registry -// -l, --labels Only sync labels -// -w, --webhooks Only sync webhooks -// -p, --protection Only sync branch protection -// -s, --security Only sync security settings -// -c, --check Dry-run: show what would change -// --config string Path to github.yaml config -// --verbose Show detailed output - -package setup - -import ( - "errors" - "os/exec" - "path/filepath" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/repos" - "github.com/spf13/cobra" -) - -// GitHub command flags -var ( - ghRepo string - ghAll bool - ghLabels bool - ghWebhooks bool - ghProtection bool - ghSecurity bool - ghCheck bool - ghConfigPath string - ghVerbose bool -) - -// addGitHubCommand adds the 'github' subcommand to the setup command. 
-func addGitHubCommand(parent *cobra.Command) { - ghCmd := &cobra.Command{ - Use: "github", - Aliases: []string{"gh"}, - Short: i18n.T("cmd.setup.github.short"), - Long: i18n.T("cmd.setup.github.long"), - RunE: func(cmd *cobra.Command, args []string) error { - return runGitHubSetup() - }, - } - - ghCmd.Flags().StringVarP(&ghRepo, "repo", "r", "", i18n.T("cmd.setup.github.flag.repo")) - ghCmd.Flags().BoolVarP(&ghAll, "all", "a", false, i18n.T("cmd.setup.github.flag.all")) - ghCmd.Flags().BoolVarP(&ghLabels, "labels", "l", false, i18n.T("cmd.setup.github.flag.labels")) - ghCmd.Flags().BoolVarP(&ghWebhooks, "webhooks", "w", false, i18n.T("cmd.setup.github.flag.webhooks")) - ghCmd.Flags().BoolVarP(&ghProtection, "protection", "p", false, i18n.T("cmd.setup.github.flag.protection")) - ghCmd.Flags().BoolVarP(&ghSecurity, "security", "s", false, i18n.T("cmd.setup.github.flag.security")) - ghCmd.Flags().BoolVarP(&ghCheck, "check", "c", false, i18n.T("cmd.setup.github.flag.check")) - ghCmd.Flags().StringVar(&ghConfigPath, "config", "", i18n.T("cmd.setup.github.flag.config")) - ghCmd.Flags().BoolVarP(&ghVerbose, "verbose", "v", false, i18n.T("common.flag.verbose")) - - parent.AddCommand(ghCmd) -} - -func runGitHubSetup() error { - // Check gh is available - if _, err := exec.LookPath("gh"); err != nil { - return errors.New(i18n.T("error.gh_not_found")) - } - - // Check gh is authenticated - if !cli.GhAuthenticated() { - return errors.New(i18n.T("cmd.setup.github.error.not_authenticated")) - } - - // Find registry - registryPath, err := repos.FindRegistry() - if err != nil { - return cli.Wrap(err, i18n.T("error.registry_not_found")) - } - - reg, err := repos.LoadRegistry(registryPath) - if err != nil { - return cli.Wrap(err, "failed to load registry") - } - - registryDir := filepath.Dir(registryPath) - - // Find GitHub config - configPath, err := FindGitHubConfig(registryDir, ghConfigPath) - if err != nil { - return cli.Wrap(err, 
i18n.T("cmd.setup.github.error.config_not_found")) - } - - config, err := LoadGitHubConfig(configPath) - if err != nil { - return cli.Wrap(err, "failed to load GitHub config") - } - - if err := config.Validate(); err != nil { - return cli.Wrap(err, "invalid GitHub config") - } - - // Print header - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("registry")), registryPath) - cli.Print("%s %s\n", dimStyle.Render(i18n.Label("config")), configPath) - - if ghCheck { - cli.Print("%s\n", warningStyle.Render(i18n.T("cmd.setup.github.dry_run_mode"))) - } - - // Determine which repos to process - var reposToProcess []*repos.Repo - - // Reject conflicting flags - if ghRepo != "" && ghAll { - return errors.New(i18n.T("cmd.setup.github.error.conflicting_flags")) - } - - if ghRepo != "" { - // Single repo mode - repo, ok := reg.Get(ghRepo) - if !ok { - return errors.New(i18n.T("error.repo_not_found", map[string]interface{}{"Name": ghRepo})) - } - reposToProcess = []*repos.Repo{repo} - } else if ghAll { - // All repos mode - reposToProcess = reg.List() - } else { - // No repos specified - cli.Print("\n%s\n", i18n.T("cmd.setup.github.no_repos_specified")) - cli.Print(" %s\n", i18n.T("cmd.setup.github.usage_hint")) - return nil - } - - // Determine which operations to run - runAll := !ghLabels && !ghWebhooks && !ghProtection && !ghSecurity - runLabels := runAll || ghLabels - runWebhooks := runAll || ghWebhooks - runProtection := runAll || ghProtection - runSecurity := runAll || ghSecurity - - // Process each repo - aggregate := NewAggregate() - - for i, repo := range reposToProcess { - repoFullName := cli.Sprintf("%s/%s", reg.Org, repo.Name) - - // Show progress - cli.Print("\033[2K\r%s %d/%d %s", - dimStyle.Render(i18n.T("common.progress.checking")), - i+1, len(reposToProcess), repo.Name) - - changes := NewChangeSet(repo.Name) - - // Sync labels - if runLabels { - labelChanges, err := SyncLabels(repoFullName, config, ghCheck) - if err != nil { - cli.Print("\033[2K\r") - 
cli.Print("%s %s: %s\n", errorStyle.Render(cli.Glyph(":cross:")), repo.Name, err) - aggregate.Add(changes) // Preserve partial results - continue - } - changes.Changes = append(changes.Changes, labelChanges.Changes...) - } - - // Sync webhooks - if runWebhooks { - webhookChanges, err := SyncWebhooks(repoFullName, config, ghCheck) - if err != nil { - cli.Print("\033[2K\r") - cli.Print("%s %s: %s\n", errorStyle.Render(cli.Glyph(":cross:")), repo.Name, err) - aggregate.Add(changes) // Preserve partial results - continue - } - changes.Changes = append(changes.Changes, webhookChanges.Changes...) - } - - // Sync branch protection - if runProtection { - protectionChanges, err := SyncBranchProtection(repoFullName, config, ghCheck) - if err != nil { - cli.Print("\033[2K\r") - cli.Print("%s %s: %s\n", errorStyle.Render(cli.Glyph(":cross:")), repo.Name, err) - aggregate.Add(changes) // Preserve partial results - continue - } - changes.Changes = append(changes.Changes, protectionChanges.Changes...) - } - - // Sync security settings - if runSecurity { - securityChanges, err := SyncSecuritySettings(repoFullName, config, ghCheck) - if err != nil { - cli.Print("\033[2K\r") - cli.Print("%s %s: %s\n", errorStyle.Render(cli.Glyph(":cross:")), repo.Name, err) - aggregate.Add(changes) // Preserve partial results - continue - } - changes.Changes = append(changes.Changes, securityChanges.Changes...) 
- } - - aggregate.Add(changes) - } - - // Clear progress line - cli.Print("\033[2K\r") - - // Print results - for _, cs := range aggregate.Sets { - cs.Print(ghVerbose || ghCheck) - } - - // Print summary - aggregate.PrintSummary() - - // Suggest permission fix if needed - if ghCheck { - cli.Print("\n%s\n", i18n.T("cmd.setup.github.run_without_check")) - } - - return nil -} diff --git a/pkg/setup/cmd_registry.go b/pkg/setup/cmd_registry.go deleted file mode 100644 index 250cd0f..0000000 --- a/pkg/setup/cmd_registry.go +++ /dev/null @@ -1,262 +0,0 @@ -// cmd_registry.go implements registry mode for cloning packages. -// -// Registry mode is activated when a repos.yaml exists. It reads the registry -// and clones all (or selected) packages into the configured packages directory. - -package setup - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/repos" - "github.com/host-uk/core/pkg/workspace" -) - -// runRegistrySetup loads a registry from path and runs setup. -func runRegistrySetup(ctx context.Context, registryPath, only string, dryRun, all, runBuild bool) error { - reg, err := repos.LoadRegistry(registryPath) - if err != nil { - return fmt.Errorf("failed to load registry: %w", err) - } - - // Check workspace config for default_only if no filter specified - if only == "" { - registryDir := filepath.Dir(registryPath) - if wsConfig, err := workspace.LoadConfig(registryDir); err == nil && wsConfig != nil && len(wsConfig.DefaultOnly) > 0 { - only = strings.Join(wsConfig.DefaultOnly, ",") - } - } - - return runRegistrySetupWithReg(ctx, reg, registryPath, only, dryRun, all, runBuild) -} - -// runRegistrySetupWithReg runs setup with an already-loaded registry. 
-func runRegistrySetupWithReg(ctx context.Context, reg *repos.Registry, registryPath, only string, dryRun, all, runBuild bool) error { - fmt.Printf("%s %s\n", dimStyle.Render(i18n.Label("registry")), registryPath) - fmt.Printf("%s %s\n", dimStyle.Render(i18n.T("cmd.setup.org_label")), reg.Org) - - registryDir := filepath.Dir(registryPath) - - // Determine base path for cloning - basePath := reg.BasePath - if basePath == "" { - // Load workspace config to see if packages_dir is set (ignore errors, fall back to default) - wsConfig, _ := workspace.LoadConfig(registryDir) - if wsConfig != nil && wsConfig.PackagesDir != "" { - basePath = wsConfig.PackagesDir - } else { - basePath = "./packages" - } - } - - // Expand ~ - if strings.HasPrefix(basePath, "~/") { - home, _ := os.UserHomeDir() - basePath = filepath.Join(home, basePath[2:]) - } - - // Resolve relative to registry location - if !filepath.IsAbs(basePath) { - basePath = filepath.Join(registryDir, basePath) - } - - fmt.Printf("%s %s\n", dimStyle.Render(i18n.Label("target")), basePath) - - // Parse type filter - var typeFilter []string - if only != "" { - for _, t := range strings.Split(only, ",") { - typeFilter = append(typeFilter, strings.TrimSpace(t)) - } - fmt.Printf("%s %s\n", dimStyle.Render(i18n.Label("filter")), only) - } - - // Ensure base path exists - if !dryRun { - if err := os.MkdirAll(basePath, 0755); err != nil { - return fmt.Errorf("failed to create packages directory: %w", err) - } - } - - // Get all available repos - allRepos := reg.List() - - // Determine which repos to clone - var toClone []*repos.Repo - var skipped, exists int - - // Use wizard in interactive mode, unless --all specified - useWizard := isTerminal() && !all && !dryRun - - if useWizard { - selected, err := runPackageWizard(reg, typeFilter) - if err != nil { - return fmt.Errorf("wizard error: %w", err) - } - - // Build set of selected repos - selectedSet := make(map[string]bool) - for _, name := range selected { - 
selectedSet[name] = true - } - - // Filter repos based on selection - for _, repo := range allRepos { - if !selectedSet[repo.Name] { - skipped++ - continue - } - - // Check if already exists - repoPath := filepath.Join(basePath, repo.Name) - if _, err := os.Stat(filepath.Join(repoPath, ".git")); err == nil { - exists++ - continue - } - - toClone = append(toClone, repo) - } - } else { - // Non-interactive: filter by type - typeFilterSet := make(map[string]bool) - for _, t := range typeFilter { - typeFilterSet[t] = true - } - - for _, repo := range allRepos { - // Skip if type filter doesn't match (when filter is specified) - if len(typeFilterSet) > 0 && !typeFilterSet[repo.Type] { - skipped++ - continue - } - - // Skip if clone: false - if repo.Clone != nil && !*repo.Clone { - skipped++ - continue - } - - // Check if already exists - repoPath := filepath.Join(basePath, repo.Name) - if _, err := os.Stat(filepath.Join(repoPath, ".git")); err == nil { - exists++ - continue - } - - toClone = append(toClone, repo) - } - } - - // Summary - fmt.Println() - fmt.Printf("%s, %s, %s\n", - i18n.T("cmd.setup.to_clone", map[string]interface{}{"Count": len(toClone)}), - i18n.T("cmd.setup.exist", map[string]interface{}{"Count": exists}), - i18n.T("common.count.skipped", map[string]interface{}{"Count": skipped})) - - if len(toClone) == 0 { - fmt.Printf("\n%s\n", i18n.T("cmd.setup.nothing_to_clone")) - return nil - } - - if dryRun { - fmt.Printf("\n%s\n", i18n.T("cmd.setup.would_clone_list")) - for _, repo := range toClone { - fmt.Printf(" %s (%s)\n", repoNameStyle.Render(repo.Name), repo.Type) - } - return nil - } - - // Confirm in interactive mode - if useWizard { - confirmed, err := confirmClone(len(toClone), basePath) - if err != nil { - return err - } - if !confirmed { - fmt.Println(i18n.T("cmd.setup.cancelled")) - return nil - } - } - - // Clone repos - fmt.Println() - var succeeded, failed int - - for _, repo := range toClone { - fmt.Printf(" %s %s... 
", dimStyle.Render(i18n.T("common.status.cloning")), repo.Name) - - repoPath := filepath.Join(basePath, repo.Name) - - err := gitClone(ctx, reg.Org, repo.Name, repoPath) - if err != nil { - fmt.Printf("%s\n", errorStyle.Render("x "+err.Error())) - failed++ - } else { - fmt.Printf("%s\n", successStyle.Render(i18n.T("cmd.setup.done"))) - succeeded++ - } - } - - // Summary - fmt.Println() - fmt.Printf("%s %s", successStyle.Render(i18n.Label("done")), i18n.T("cmd.setup.cloned_count", map[string]interface{}{"Count": succeeded})) - if failed > 0 { - fmt.Printf(", %s", errorStyle.Render(i18n.T("i18n.count.failed", failed))) - } - if exists > 0 { - fmt.Printf(", %s", i18n.T("cmd.setup.already_exist_count", map[string]interface{}{"Count": exists})) - } - fmt.Println() - - // Run build if requested - if runBuild && succeeded > 0 { - fmt.Println() - fmt.Printf("%s %s\n", dimStyle.Render(">>"), i18n.ProgressSubject("run", "build")) - buildCmd := exec.Command("core", "build") - buildCmd.Dir = basePath - buildCmd.Stdout = os.Stdout - buildCmd.Stderr = os.Stderr - if err := buildCmd.Run(); err != nil { - return fmt.Errorf("%s: %w", i18n.T("i18n.fail.run", "build"), err) - } - } - - return nil -} - -// gitClone clones a repository using gh CLI or git. 
-func gitClone(ctx context.Context, org, repo, path string) error { - // Try gh clone first with HTTPS (works without SSH keys) - if cli.GhAuthenticated() { - // Use HTTPS URL directly to bypass git_protocol config - httpsURL := fmt.Sprintf("https://github.com/%s/%s.git", org, repo) - cmd := exec.CommandContext(ctx, "gh", "repo", "clone", httpsURL, path) - output, err := cmd.CombinedOutput() - if err == nil { - return nil - } - errStr := strings.TrimSpace(string(output)) - // Only fall through to SSH if it's an auth error - if !strings.Contains(errStr, "Permission denied") && - !strings.Contains(errStr, "could not read") { - return fmt.Errorf("%s", errStr) - } - } - - // Fallback to git clone via SSH - url := fmt.Sprintf("git@github.com:%s/%s.git", org, repo) - cmd := exec.CommandContext(ctx, "git", "clone", url, path) - output, err := cmd.CombinedOutput() - if err != nil { - return fmt.Errorf("%s", strings.TrimSpace(string(output))) - } - return nil -} diff --git a/pkg/setup/cmd_repo.go b/pkg/setup/cmd_repo.go deleted file mode 100644 index 330313a..0000000 --- a/pkg/setup/cmd_repo.go +++ /dev/null @@ -1,289 +0,0 @@ -// cmd_repo.go implements repository setup with .core/ configuration. -// -// When running setup in an existing git repository, this generates -// build.yaml, release.yaml, and test.yaml configurations based on -// detected project type. - -package setup - -import ( - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/host-uk/core/pkg/i18n" -) - -// runRepoSetup sets up the current repository with .core/ configuration. 
-func runRepoSetup(repoPath string, dryRun bool) error { - fmt.Printf("%s %s: %s\n", dimStyle.Render(">>"), i18n.T("cmd.setup.repo.setting_up"), repoPath) - - // Detect project type - projectType := detectProjectType(repoPath) - fmt.Printf("%s %s: %s\n", dimStyle.Render(">>"), i18n.T("cmd.setup.repo.detected_type"), projectType) - - // Create .core directory - coreDir := filepath.Join(repoPath, ".core") - if !dryRun { - if err := os.MkdirAll(coreDir, 0755); err != nil { - return fmt.Errorf("failed to create .core directory: %w", err) - } - } - - // Generate configs based on project type - name := filepath.Base(repoPath) - configs := map[string]string{ - "build.yaml": generateBuildConfig(repoPath, projectType), - "release.yaml": generateReleaseConfig(name, projectType), - "test.yaml": generateTestConfig(projectType), - } - - if dryRun { - fmt.Printf("\n%s %s:\n", dimStyle.Render(">>"), i18n.T("cmd.setup.repo.would_create")) - for filename, content := range configs { - fmt.Printf("\n %s:\n", filepath.Join(coreDir, filename)) - // Indent content for display - for _, line := range strings.Split(content, "\n") { - fmt.Printf(" %s\n", line) - } - } - return nil - } - - for filename, content := range configs { - configPath := filepath.Join(coreDir, filename) - if err := os.WriteFile(configPath, []byte(content), 0644); err != nil { - return fmt.Errorf("failed to write %s: %w", filename, err) - } - fmt.Printf("%s %s %s\n", successStyle.Render(">>"), i18n.T("cmd.setup.repo.created"), configPath) - } - - return nil -} - -// detectProjectType identifies the project type from files present. 
-func detectProjectType(path string) string { - // Check in priority order - if _, err := os.Stat(filepath.Join(path, "wails.json")); err == nil { - return "wails" - } - if _, err := os.Stat(filepath.Join(path, "go.mod")); err == nil { - return "go" - } - if _, err := os.Stat(filepath.Join(path, "composer.json")); err == nil { - return "php" - } - if _, err := os.Stat(filepath.Join(path, "package.json")); err == nil { - return "node" - } - return "unknown" -} - -// generateBuildConfig creates a build.yaml configuration based on project type. -func generateBuildConfig(path, projectType string) string { - name := filepath.Base(path) - - switch projectType { - case "go", "wails": - return fmt.Sprintf(`version: 1 -project: - name: %s - description: Go application - main: ./cmd/%s - binary: %s -build: - cgo: false - flags: - - -trimpath - ldflags: - - -s - - -w -targets: - - os: linux - arch: amd64 - - os: linux - arch: arm64 - - os: darwin - arch: amd64 - - os: darwin - arch: arm64 - - os: windows - arch: amd64 -`, name, name, name) - - case "php": - return fmt.Sprintf(`version: 1 -project: - name: %s - description: PHP application - type: php -build: - dockerfile: Dockerfile - image: %s -`, name, name) - - case "node": - return fmt.Sprintf(`version: 1 -project: - name: %s - description: Node.js application - type: node -build: - script: npm run build - output: dist -`, name) - - default: - return fmt.Sprintf(`version: 1 -project: - name: %s - description: Application -`, name) - } -} - -// generateReleaseConfig creates a release.yaml configuration. 
-func generateReleaseConfig(name, projectType string) string { - // Try to detect GitHub repo from git remote - repo := detectGitHubRepo() - if repo == "" { - repo = "owner/" + name - } - - base := fmt.Sprintf(`version: 1 -project: - name: %s - repository: %s -`, name, repo) - - switch projectType { - case "go", "wails": - return base + ` -changelog: - include: - - feat - - fix - - perf - - refactor - exclude: - - chore - - docs - - style - - test - -publishers: - - type: github - draft: false - prerelease: false -` - case "php": - return base + ` -changelog: - include: - - feat - - fix - - perf - -publishers: - - type: github - draft: false -` - default: - return base + ` -changelog: - include: - - feat - - fix - -publishers: - - type: github -` - } -} - -// generateTestConfig creates a test.yaml configuration. -func generateTestConfig(projectType string) string { - switch projectType { - case "go", "wails": - return `version: 1 - -commands: - - name: unit - run: go test ./... - - name: coverage - run: go test -coverprofile=coverage.out ./... - - name: race - run: go test -race ./... - -env: - CGO_ENABLED: "0" -` - case "php": - return `version: 1 - -commands: - - name: unit - run: vendor/bin/pest --parallel - - name: types - run: vendor/bin/phpstan analyse - - name: lint - run: vendor/bin/pint --test - -env: - APP_ENV: testing - DB_CONNECTION: sqlite -` - case "node": - return `version: 1 - -commands: - - name: unit - run: npm test - - name: lint - run: npm run lint - - name: typecheck - run: npm run typecheck - -env: - NODE_ENV: test -` - default: - return `version: 1 - -commands: - - name: test - run: echo "No tests configured" -` - } -} - -// detectGitHubRepo tries to extract owner/repo from git remote. 
-func detectGitHubRepo() string { - cmd := exec.Command("git", "remote", "get-url", "origin") - output, err := cmd.Output() - if err != nil { - return "" - } - - url := strings.TrimSpace(string(output)) - - // Handle SSH format: git@github.com:owner/repo.git - if strings.HasPrefix(url, "git@github.com:") { - repo := strings.TrimPrefix(url, "git@github.com:") - repo = strings.TrimSuffix(repo, ".git") - return repo - } - - // Handle HTTPS format: https://github.com/owner/repo.git - if strings.Contains(url, "github.com/") { - parts := strings.Split(url, "github.com/") - if len(parts) == 2 { - repo := strings.TrimSuffix(parts[1], ".git") - return repo - } - } - - return "" -} diff --git a/pkg/setup/cmd_setup.go b/pkg/setup/cmd_setup.go deleted file mode 100644 index adfc28f..0000000 --- a/pkg/setup/cmd_setup.go +++ /dev/null @@ -1,59 +0,0 @@ -// Package setup provides workspace setup and bootstrap commands. -package setup - -import ( - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/spf13/cobra" -) - -// Style aliases from shared package -var ( - repoNameStyle = cli.RepoStyle - successStyle = cli.SuccessStyle - errorStyle = cli.ErrorStyle - warningStyle = cli.WarningStyle - dimStyle = cli.DimStyle -) - -// Default organization and devops repo for bootstrap -const ( - defaultOrg = "host-uk" - devopsRepo = "core-devops" - devopsReposYaml = "repos.yaml" -) - -// Setup command flags -var ( - registryPath string - only string - dryRun bool - all bool - name string - build bool -) - -var setupCmd = &cobra.Command{ - Use: "setup", - Short: i18n.T("cmd.setup.short"), - Long: i18n.T("cmd.setup.long"), - RunE: func(cmd *cobra.Command, args []string) error { - return runSetupOrchestrator(registryPath, only, dryRun, all, name, build) - }, -} - -func initSetupFlags() { - setupCmd.Flags().StringVar(®istryPath, "registry", "", i18n.T("cmd.setup.flag.registry")) - setupCmd.Flags().StringVar(&only, "only", "", i18n.T("cmd.setup.flag.only")) - 
setupCmd.Flags().BoolVar(&dryRun, "dry-run", false, i18n.T("cmd.setup.flag.dry_run")) - setupCmd.Flags().BoolVar(&all, "all", false, i18n.T("cmd.setup.flag.all")) - setupCmd.Flags().StringVar(&name, "name", "", i18n.T("cmd.setup.flag.name")) - setupCmd.Flags().BoolVar(&build, "build", false, i18n.T("cmd.setup.flag.build")) -} - -// AddSetupCommand adds the 'setup' command to the given parent command. -func AddSetupCommand(root *cobra.Command) { - initSetupFlags() - addGitHubCommand(setupCmd) - root.AddCommand(setupCmd) -} diff --git a/pkg/setup/cmd_wizard.go b/pkg/setup/cmd_wizard.go deleted file mode 100644 index d141faf..0000000 --- a/pkg/setup/cmd_wizard.go +++ /dev/null @@ -1,93 +0,0 @@ -// cmd_wizard.go implements the interactive package selection wizard. -package setup - -import ( - "fmt" - "os" - "sort" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/host-uk/core/pkg/repos" - "golang.org/x/term" -) - -// isTerminal returns true if stdin is a terminal. -func isTerminal() bool { - return term.IsTerminal(int(os.Stdin.Fd())) -} - -// promptSetupChoice asks the user whether to setup the working directory or create a package. -func promptSetupChoice() (string, error) { - fmt.Println(cli.TitleStyle.Render(i18n.T("cmd.setup.wizard.git_repo_title"))) - fmt.Println(i18n.T("cmd.setup.wizard.what_to_do")) - - choice, err := cli.Select("Choose action", []string{"setup", "package"}) - if err != nil { - return "", err - } - return choice, nil -} - -// promptProjectName asks the user for a project directory name. -func promptProjectName(defaultName string) (string, error) { - fmt.Println(cli.TitleStyle.Render(i18n.T("cmd.setup.wizard.project_name_title"))) - return cli.Prompt(i18n.T("cmd.setup.wizard.project_name_desc"), defaultName) -} - -// runPackageWizard presents an interactive multi-select UI for package selection. 
-func runPackageWizard(reg *repos.Registry, preselectedTypes []string) ([]string, error) { - allRepos := reg.List() - - // Build options - var options []string - - // Sort by name - sort.Slice(allRepos, func(i, j int) bool { - return allRepos[i].Name < allRepos[j].Name - }) - - for _, repo := range allRepos { - if repo.Clone != nil && !*repo.Clone { - continue - } - // Format: name (type) - label := fmt.Sprintf("%s (%s)", repo.Name, repo.Type) - options = append(options, label) - } - - fmt.Println(cli.TitleStyle.Render(i18n.T("cmd.setup.wizard.package_selection"))) - fmt.Println(i18n.T("cmd.setup.wizard.selection_hint")) - - selectedLabels, err := cli.MultiSelect(i18n.T("cmd.setup.wizard.select_packages"), options) - if err != nil { - return nil, err - } - - // Extract names from labels - var selected []string - for _, label := range selectedLabels { - // Basic parsing assuming "name (type)" format - // Find last space - var name string - // Since we constructed it, we know it ends with (type) - // but repo name might have spaces? Repos usually don't. - // Let's iterate repos to find match - for _, repo := range allRepos { - if label == fmt.Sprintf("%s (%s)", repo.Name, repo.Type) { - name = repo.Name - break - } - } - if name != "" { - selected = append(selected, name) - } - } - return selected, nil -} - -// confirmClone asks for confirmation before cloning. -func confirmClone(count int, target string) (bool, error) { - confirmed := cli.Confirm(i18n.T("cmd.setup.wizard.confirm_clone", map[string]interface{}{"Count": count, "Target": target})) - return confirmed, nil -} \ No newline at end of file diff --git a/pkg/setup/github_config.go b/pkg/setup/github_config.go deleted file mode 100644 index 3c67345..0000000 --- a/pkg/setup/github_config.go +++ /dev/null @@ -1,203 +0,0 @@ -// github_config.go defines configuration types for GitHub repository setup. 
-// -// Configuration is loaded from .core/github.yaml and supports environment -// variable expansion using ${VAR} or ${VAR:-default} syntax. - -package setup - -import ( - "fmt" - "os" - "path/filepath" - "regexp" - "strings" - - "gopkg.in/yaml.v3" -) - -// GitHubConfig represents the full GitHub setup configuration. -type GitHubConfig struct { - Version int `yaml:"version"` - Labels []LabelConfig `yaml:"labels"` - Webhooks map[string]WebhookConfig `yaml:"webhooks"` - BranchProtection []BranchProtectionConfig `yaml:"branch_protection"` - Security SecurityConfig `yaml:"security"` -} - -// LabelConfig defines a GitHub issue/PR label. -type LabelConfig struct { - Name string `yaml:"name"` - Color string `yaml:"color"` - Description string `yaml:"description"` -} - -// WebhookConfig defines a GitHub webhook configuration. -type WebhookConfig struct { - URL string `yaml:"url"` // Webhook URL (supports ${ENV_VAR}) - ContentType string `yaml:"content_type"` // json or form (default: json) - Secret string `yaml:"secret"` // Optional secret (supports ${ENV_VAR}) - Events []string `yaml:"events"` // Events to trigger on - Active *bool `yaml:"active"` // Whether webhook is active (default: true) -} - -// BranchProtectionConfig defines branch protection rules. -type BranchProtectionConfig struct { - Branch string `yaml:"branch"` - RequiredReviews int `yaml:"required_reviews"` - DismissStale bool `yaml:"dismiss_stale"` - RequireCodeOwnerReviews bool `yaml:"require_code_owner_reviews"` - RequiredStatusChecks []string `yaml:"required_status_checks"` - RequireLinearHistory bool `yaml:"require_linear_history"` - AllowForcePushes bool `yaml:"allow_force_pushes"` - AllowDeletions bool `yaml:"allow_deletions"` - EnforceAdmins bool `yaml:"enforce_admins"` - RequireConversationResolution bool `yaml:"require_conversation_resolution"` -} - -// SecurityConfig defines repository security settings. 
-type SecurityConfig struct { - DependabotAlerts bool `yaml:"dependabot_alerts"` - DependabotSecurityUpdates bool `yaml:"dependabot_security_updates"` - SecretScanning bool `yaml:"secret_scanning"` - SecretScanningPushProtection bool `yaml:"push_protection"` -} - -// LoadGitHubConfig reads and parses a GitHub configuration file. -func LoadGitHubConfig(path string) (*GitHubConfig, error) { - data, err := os.ReadFile(path) - if err != nil { - return nil, fmt.Errorf("failed to read config file: %w", err) - } - - // Expand environment variables before parsing - expanded := expandEnvVars(string(data)) - - var config GitHubConfig - if err := yaml.Unmarshal([]byte(expanded), &config); err != nil { - return nil, fmt.Errorf("failed to parse config file: %w", err) - } - - // Set defaults - for i := range config.Webhooks { - wh := config.Webhooks[i] - if wh.ContentType == "" { - wh.ContentType = "json" - } - if wh.Active == nil { - active := true - wh.Active = &active - } - config.Webhooks[i] = wh - } - - return &config, nil -} - -// envVarPattern matches ${VAR} or ${VAR:-default} patterns. -var envVarPattern = regexp.MustCompile(`\$\{([A-Za-z_][A-Za-z0-9_]*)(?::-([^}]*))?\}`) - -// expandEnvVars expands environment variables in the input string. -// Supports ${VAR} and ${VAR:-default} syntax. -func expandEnvVars(input string) string { - return envVarPattern.ReplaceAllStringFunc(input, func(match string) string { - // Parse the match - submatch := envVarPattern.FindStringSubmatch(match) - if len(submatch) < 2 { - return match - } - - varName := submatch[1] - defaultValue := "" - if len(submatch) >= 3 { - defaultValue = submatch[2] - } - - // Look up the environment variable - if value, ok := os.LookupEnv(varName); ok { - return value - } - return defaultValue - }) -} - -// FindGitHubConfig searches for github.yaml in common locations. -// Search order: -// 1. Specified path (if non-empty) -// 2. .core/github.yaml (relative to registry) -// 3. 
github.yaml (relative to registry) -func FindGitHubConfig(registryDir, specifiedPath string) (string, error) { - if specifiedPath != "" { - if _, err := os.Stat(specifiedPath); err == nil { - return specifiedPath, nil - } - return "", fmt.Errorf("config file not found: %s", specifiedPath) - } - - // Search in common locations (using filepath.Join for OS-portable paths) - candidates := []string{ - filepath.Join(registryDir, ".core", "github.yaml"), - filepath.Join(registryDir, "github.yaml"), - } - - for _, path := range candidates { - if _, err := os.Stat(path); err == nil { - return path, nil - } - } - - return "", fmt.Errorf("github.yaml not found in %s/.core/ or %s/", registryDir, registryDir) -} - -// Validate checks the configuration for errors. -func (c *GitHubConfig) Validate() error { - if c.Version != 1 { - return fmt.Errorf("unsupported config version: %d (expected 1)", c.Version) - } - - // Validate labels - for i, label := range c.Labels { - if label.Name == "" { - return fmt.Errorf("label %d: name is required", i+1) - } - if label.Color == "" { - return fmt.Errorf("label %q: color is required", label.Name) - } - // Validate color format (hex without #) - if !isValidHexColor(label.Color) { - return fmt.Errorf("label %q: invalid color %q (expected 6-digit hex without #)", label.Name, label.Color) - } - } - - // Validate webhooks (skip those with empty URLs - allows optional webhooks via env vars) - for name, wh := range c.Webhooks { - if wh.URL == "" { - // Empty URL is allowed - webhook will be skipped during sync - continue - } - if len(wh.Events) == 0 { - return fmt.Errorf("webhook %q: at least one event is required", name) - } - } - - // Validate branch protection - for i, bp := range c.BranchProtection { - if bp.Branch == "" { - return fmt.Errorf("branch_protection %d: branch is required", i+1) - } - } - - return nil -} - -// isValidHexColor checks if a string is a valid 6-digit hex color (without #). 
-func isValidHexColor(color string) bool { - if len(color) != 6 { - return false - } - for _, c := range strings.ToLower(color) { - if !((c >= '0' && c <= '9') || (c >= 'a' && c <= 'f')) { - return false - } - } - return true -} diff --git a/pkg/setup/github_diff.go b/pkg/setup/github_diff.go deleted file mode 100644 index 8e64c3f..0000000 --- a/pkg/setup/github_diff.go +++ /dev/null @@ -1,278 +0,0 @@ -// github_diff.go provides change tracking for dry-run output. - -package setup - -import ( - "fmt" - "sort" - "strings" - - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" -) - -// ChangeType indicates the type of change being made. -type ChangeType string - -const ( - ChangeCreate ChangeType = "create" - ChangeUpdate ChangeType = "update" - ChangeDelete ChangeType = "delete" - ChangeSkip ChangeType = "skip" -) - -// ChangeCategory groups changes by type. -type ChangeCategory string - -const ( - CategoryLabel ChangeCategory = "label" - CategoryWebhook ChangeCategory = "webhook" - CategoryProtection ChangeCategory = "protection" - CategorySecurity ChangeCategory = "security" -) - -// Change represents a single change to be made. -type Change struct { - Category ChangeCategory - Type ChangeType - Name string - Description string - Details map[string]string // Key-value details about the change -} - -// ChangeSet tracks all changes for a repository. -type ChangeSet struct { - Repo string - Changes []Change -} - -// NewChangeSet creates a new change set for a repository. -func NewChangeSet(repo string) *ChangeSet { - return &ChangeSet{ - Repo: repo, - Changes: make([]Change, 0), - } -} - -// Add adds a change to the set. -func (cs *ChangeSet) Add(category ChangeCategory, changeType ChangeType, name, description string) { - cs.Changes = append(cs.Changes, Change{ - Category: category, - Type: changeType, - Name: name, - Description: description, - Details: make(map[string]string), - }) -} - -// AddWithDetails adds a change with additional details. 
-func (cs *ChangeSet) AddWithDetails(category ChangeCategory, changeType ChangeType, name, description string, details map[string]string) { - cs.Changes = append(cs.Changes, Change{ - Category: category, - Type: changeType, - Name: name, - Description: description, - Details: details, - }) -} - -// HasChanges returns true if there are any non-skip changes. -func (cs *ChangeSet) HasChanges() bool { - for _, c := range cs.Changes { - if c.Type != ChangeSkip { - return true - } - } - return false -} - -// Count returns the number of changes by type. -func (cs *ChangeSet) Count() (creates, updates, deletes, skips int) { - for _, c := range cs.Changes { - switch c.Type { - case ChangeCreate: - creates++ - case ChangeUpdate: - updates++ - case ChangeDelete: - deletes++ - case ChangeSkip: - skips++ - } - } - return -} - -// CountByCategory returns changes grouped by category. -func (cs *ChangeSet) CountByCategory() map[ChangeCategory]int { - counts := make(map[ChangeCategory]int) - for _, c := range cs.Changes { - if c.Type != ChangeSkip { - counts[c.Category]++ - } - } - return counts -} - -// Print outputs the change set to the console. 
-func (cs *ChangeSet) Print(verbose bool) { - creates, updates, deletes, skips := cs.Count() - - // Print header - fmt.Printf("\n%s %s\n", dimStyle.Render(i18n.Label("repo")), repoNameStyle.Render(cs.Repo)) - - if !cs.HasChanges() { - fmt.Printf(" %s\n", dimStyle.Render(i18n.T("cmd.setup.github.no_changes"))) - return - } - - // Print summary - var parts []string - if creates > 0 { - parts = append(parts, successStyle.Render(fmt.Sprintf("+%d", creates))) - } - if updates > 0 { - parts = append(parts, warningStyle.Render(fmt.Sprintf("~%d", updates))) - } - if deletes > 0 { - parts = append(parts, errorStyle.Render(fmt.Sprintf("-%d", deletes))) - } - if skips > 0 && verbose { - parts = append(parts, dimStyle.Render(fmt.Sprintf("=%d", skips))) - } - fmt.Printf(" %s\n", strings.Join(parts, " ")) - - // Print details if verbose - if verbose { - cs.printByCategory(CategoryLabel, "Labels") - cs.printByCategory(CategoryWebhook, "Webhooks") - cs.printByCategory(CategoryProtection, "Branch protection") - cs.printByCategory(CategorySecurity, "Security") - } -} - -func (cs *ChangeSet) printByCategory(category ChangeCategory, title string) { - var categoryChanges []Change - for _, c := range cs.Changes { - if c.Category == category && c.Type != ChangeSkip { - categoryChanges = append(categoryChanges, c) - } - } - - if len(categoryChanges) == 0 { - return - } - - fmt.Printf("\n %s:\n", dimStyle.Render(title)) - for _, c := range categoryChanges { - icon := getChangeIcon(c.Type) - style := getChangeStyle(c.Type) - fmt.Printf(" %s %s", style.Render(icon), c.Name) - if c.Description != "" { - fmt.Printf(" %s", dimStyle.Render(c.Description)) - } - fmt.Println() - - // Print details (sorted for deterministic output) - keys := make([]string, 0, len(c.Details)) - for k := range c.Details { - keys = append(keys, k) - } - sort.Strings(keys) - for _, k := range keys { - fmt.Printf(" %s: %s\n", dimStyle.Render(k), c.Details[k]) - } - } -} - -func getChangeIcon(t ChangeType) string { - 
switch t { - case ChangeCreate: - return "+" - case ChangeUpdate: - return "~" - case ChangeDelete: - return "-" - default: - return "=" - } -} - -func getChangeStyle(t ChangeType) *cli.AnsiStyle { - switch t { - case ChangeCreate: - return successStyle - case ChangeUpdate: - return warningStyle - case ChangeDelete: - return errorStyle - default: - return dimStyle - } -} - -// Aggregate combines multiple change sets into a summary. -type Aggregate struct { - Sets []*ChangeSet -} - -// NewAggregate creates a new aggregate. -func NewAggregate() *Aggregate { - return &Aggregate{ - Sets: make([]*ChangeSet, 0), - } -} - -// Add adds a change set to the aggregate. -func (a *Aggregate) Add(cs *ChangeSet) { - a.Sets = append(a.Sets, cs) -} - -// TotalChanges returns the total number of changes across all sets. -func (a *Aggregate) TotalChanges() (creates, updates, deletes, skips int) { - for _, cs := range a.Sets { - c, u, d, s := cs.Count() - creates += c - updates += u - deletes += d - skips += s - } - return -} - -// ReposWithChanges returns the number of repos that have changes. -func (a *Aggregate) ReposWithChanges() int { - count := 0 - for _, cs := range a.Sets { - if cs.HasChanges() { - count++ - } - } - return count -} - -// PrintSummary outputs the aggregate summary. 
-func (a *Aggregate) PrintSummary() { - creates, updates, deletes, _ := a.TotalChanges() - reposWithChanges := a.ReposWithChanges() - - fmt.Println() - fmt.Printf("%s\n", dimStyle.Render(i18n.Label("summary"))) - fmt.Printf(" %s: %d\n", i18n.T("cmd.setup.github.repos_checked"), len(a.Sets)) - - if reposWithChanges == 0 { - fmt.Printf(" %s\n", dimStyle.Render(i18n.T("cmd.setup.github.all_up_to_date"))) - return - } - - fmt.Printf(" %s: %d\n", i18n.T("cmd.setup.github.repos_with_changes"), reposWithChanges) - if creates > 0 { - fmt.Printf(" %s: %s\n", i18n.T("cmd.setup.github.to_create"), successStyle.Render(fmt.Sprintf("%d", creates))) - } - if updates > 0 { - fmt.Printf(" %s: %s\n", i18n.T("cmd.setup.github.to_update"), warningStyle.Render(fmt.Sprintf("%d", updates))) - } - if deletes > 0 { - fmt.Printf(" %s: %s\n", i18n.T("cmd.setup.github.to_delete"), errorStyle.Render(fmt.Sprintf("%d", deletes))) - } -} diff --git a/pkg/setup/github_labels.go b/pkg/setup/github_labels.go deleted file mode 100644 index c936eb0..0000000 --- a/pkg/setup/github_labels.go +++ /dev/null @@ -1,152 +0,0 @@ -// github_labels.go implements GitHub label synchronization. -// -// Uses the gh CLI for label operations: -// - gh label list --repo {repo} --json name,color,description -// - gh label create --repo {repo} {name} --color {color} --description {desc} -// - gh label edit --repo {repo} {name} --color {color} --description {desc} - -package setup - -import ( - "encoding/json" - "os/exec" - "strings" - - "github.com/host-uk/core/pkg/cli" -) - -// GitHubLabel represents a label as returned by the GitHub API. -type GitHubLabel struct { - Name string `json:"name"` - Color string `json:"color"` - Description string `json:"description"` -} - -// ListLabels fetches all labels for a repository. 
-func ListLabels(repoFullName string) ([]GitHubLabel, error) { - args := []string{ - "label", "list", - "--repo", repoFullName, - "--json", "name,color,description", - "--limit", "200", - } - - cmd := exec.Command("gh", args...) - output, err := cmd.Output() - if err != nil { - if exitErr, ok := err.(*exec.ExitError); ok { - return nil, cli.Err("%s", strings.TrimSpace(string(exitErr.Stderr))) - } - return nil, err - } - - var labels []GitHubLabel - if err := json.Unmarshal(output, &labels); err != nil { - return nil, err - } - - return labels, nil -} - -// CreateLabel creates a new label in a repository. -func CreateLabel(repoFullName string, label LabelConfig) error { - args := []string{ - "label", "create", - "--repo", repoFullName, - label.Name, - "--color", label.Color, - } - - if label.Description != "" { - args = append(args, "--description", label.Description) - } - - cmd := exec.Command("gh", args...) - output, err := cmd.CombinedOutput() - if err != nil { - return cli.Err("%s", strings.TrimSpace(string(output))) - } - return nil -} - -// EditLabel updates an existing label in a repository. -func EditLabel(repoFullName string, label LabelConfig) error { - args := []string{ - "label", "edit", - "--repo", repoFullName, - label.Name, - "--color", label.Color, - } - - if label.Description != "" { - args = append(args, "--description", label.Description) - } - - cmd := exec.Command("gh", args...) - output, err := cmd.CombinedOutput() - if err != nil { - return cli.Err("%s", strings.TrimSpace(string(output))) - } - return nil -} - -// SyncLabels synchronizes labels for a repository. -// Returns a ChangeSet describing what was changed (or would be changed in dry-run mode). 
-func SyncLabels(repoFullName string, config *GitHubConfig, dryRun bool) (*ChangeSet, error) { - changes := NewChangeSet(repoFullName) - - // Get existing labels - existing, err := ListLabels(repoFullName) - if err != nil { - return nil, cli.Wrap(err, "failed to list labels") - } - - // Build lookup map - existingMap := make(map[string]GitHubLabel) - for _, label := range existing { - existingMap[strings.ToLower(label.Name)] = label - } - - // Process each configured label - for _, wantLabel := range config.Labels { - key := strings.ToLower(wantLabel.Name) - existing, exists := existingMap[key] - - if !exists { - // Create new label - changes.Add(CategoryLabel, ChangeCreate, wantLabel.Name, wantLabel.Description) - if !dryRun { - if err := CreateLabel(repoFullName, wantLabel); err != nil { - return changes, cli.Wrap(err, "failed to create label "+wantLabel.Name) - } - } - continue - } - - // Check if update is needed - needsUpdate := false - details := make(map[string]string) - - if !strings.EqualFold(existing.Color, wantLabel.Color) { - needsUpdate = true - details["color"] = existing.Color + " -> " + wantLabel.Color - } - if existing.Description != wantLabel.Description { - needsUpdate = true - details["description"] = "updated" - } - - if needsUpdate { - changes.AddWithDetails(CategoryLabel, ChangeUpdate, wantLabel.Name, "", details) - if !dryRun { - if err := EditLabel(repoFullName, wantLabel); err != nil { - return changes, cli.Wrap(err, "failed to update label "+wantLabel.Name) - } - } - } else { - changes.Add(CategoryLabel, ChangeSkip, wantLabel.Name, "up to date") - } - } - - return changes, nil -} diff --git a/pkg/setup/github_protection.go b/pkg/setup/github_protection.go deleted file mode 100644 index 916ffe8..0000000 --- a/pkg/setup/github_protection.go +++ /dev/null @@ -1,299 +0,0 @@ -// github_protection.go implements GitHub branch protection synchronization. 
-// -// Uses the gh api command for branch protection operations: -// - gh api repos/{owner}/{repo}/branches/{branch}/protection --method GET -// - gh api repos/{owner}/{repo}/branches/{branch}/protection --method PUT - -package setup - -import ( - "encoding/json" - "fmt" - "os/exec" - "strings" - - "github.com/host-uk/core/pkg/cli" -) - -// GitHubBranchProtection represents branch protection rules from the GitHub API. -type GitHubBranchProtection struct { - RequiredStatusChecks *RequiredStatusChecks `json:"required_status_checks"` - RequiredPullRequestReviews *RequiredPullRequestReviews `json:"required_pull_request_reviews"` - EnforceAdmins *EnforceAdmins `json:"enforce_admins"` - RequiredLinearHistory *RequiredLinearHistory `json:"required_linear_history"` - AllowForcePushes *AllowForcePushes `json:"allow_force_pushes"` - AllowDeletions *AllowDeletions `json:"allow_deletions"` - RequiredConversationResolution *RequiredConversationResolution `json:"required_conversation_resolution"` -} - -// RequiredStatusChecks defines required CI checks. -type RequiredStatusChecks struct { - Strict bool `json:"strict"` - Contexts []string `json:"contexts"` -} - -// RequiredPullRequestReviews defines review requirements. -type RequiredPullRequestReviews struct { - DismissStaleReviews bool `json:"dismiss_stale_reviews"` - RequireCodeOwnerReviews bool `json:"require_code_owner_reviews"` - RequiredApprovingReviewCount int `json:"required_approving_review_count"` -} - -// EnforceAdmins indicates if admins are subject to rules. -type EnforceAdmins struct { - Enabled bool `json:"enabled"` -} - -// RequiredLinearHistory indicates if linear history is required. -type RequiredLinearHistory struct { - Enabled bool `json:"enabled"` -} - -// AllowForcePushes indicates if force pushes are allowed. -type AllowForcePushes struct { - Enabled bool `json:"enabled"` -} - -// AllowDeletions indicates if branch deletion is allowed. 
-type AllowDeletions struct { - Enabled bool `json:"enabled"` -} - -// RequiredConversationResolution indicates if conversation resolution is required. -type RequiredConversationResolution struct { - Enabled bool `json:"enabled"` -} - -// GetBranchProtection fetches branch protection rules for a branch. -func GetBranchProtection(repoFullName, branch string) (*GitHubBranchProtection, error) { - parts := strings.Split(repoFullName, "/") - if len(parts) != 2 { - return nil, fmt.Errorf("invalid repo format: %s", repoFullName) - } - - endpoint := fmt.Sprintf("repos/%s/%s/branches/%s/protection", parts[0], parts[1], branch) - cmd := exec.Command("gh", "api", endpoint) - output, err := cmd.Output() - if err != nil { - if exitErr, ok := err.(*exec.ExitError); ok { - stderr := strings.TrimSpace(string(exitErr.Stderr)) - // Branch protection not enabled returns 404 - if strings.Contains(stderr, "404") || strings.Contains(stderr, "Branch not protected") { - return nil, nil // No protection set - } - if strings.Contains(stderr, "403") { - return nil, cli.Err("insufficient permissions to manage branch protection (requires admin)") - } - return nil, cli.Err("%s", stderr) - } - return nil, err - } - - var protection GitHubBranchProtection - if err := json.Unmarshal(output, &protection); err != nil { - return nil, err - } - - return &protection, nil -} - -// SetBranchProtection sets branch protection rules for a branch. 
-func SetBranchProtection(repoFullName, branch string, config BranchProtectionConfig) error { - parts := strings.Split(repoFullName, "/") - if len(parts) != 2 { - return fmt.Errorf("invalid repo format: %s", repoFullName) - } - - // Build the protection payload - payload := map[string]interface{}{ - "enforce_admins": config.EnforceAdmins, - "required_linear_history": config.RequireLinearHistory, - "allow_force_pushes": config.AllowForcePushes, - "allow_deletions": config.AllowDeletions, - "required_conversation_resolution": config.RequireConversationResolution, - } - - // Required pull request reviews - if config.RequiredReviews > 0 { - payload["required_pull_request_reviews"] = map[string]interface{}{ - "dismiss_stale_reviews": config.DismissStale, - "require_code_owner_reviews": config.RequireCodeOwnerReviews, - "required_approving_review_count": config.RequiredReviews, - } - } else { - payload["required_pull_request_reviews"] = nil - } - - // Required status checks - if len(config.RequiredStatusChecks) > 0 { - payload["required_status_checks"] = map[string]interface{}{ - "strict": true, - "contexts": config.RequiredStatusChecks, - } - } else { - payload["required_status_checks"] = nil - } - - // Restrictions (required but can be empty for non-org repos) - payload["restrictions"] = nil - - payloadJSON, err := json.Marshal(payload) - if err != nil { - return err - } - - endpoint := fmt.Sprintf("repos/%s/%s/branches/%s/protection", parts[0], parts[1], branch) - cmd := exec.Command("gh", "api", endpoint, "--method", "PUT", "--input", "-") - cmd.Stdin = strings.NewReader(string(payloadJSON)) - output, err := cmd.CombinedOutput() - if err != nil { - return cli.Err("%s", strings.TrimSpace(string(output))) - } - return nil -} - -// SyncBranchProtection synchronizes branch protection for a repository. 
-func SyncBranchProtection(repoFullName string, config *GitHubConfig, dryRun bool) (*ChangeSet, error) { - changes := NewChangeSet(repoFullName) - - // Skip if no branch protection configured - if len(config.BranchProtection) == 0 { - return changes, nil - } - - // Process each configured branch - for _, wantProtection := range config.BranchProtection { - branch := wantProtection.Branch - - // Get existing protection - existing, err := GetBranchProtection(repoFullName, branch) - if err != nil { - // If permission denied, note it but don't fail - if strings.Contains(err.Error(), "insufficient permissions") { - changes.Add(CategoryProtection, ChangeSkip, branch, "insufficient permissions") - continue - } - return nil, cli.Wrap(err, "failed to get protection for "+branch) - } - - // Check if protection needs to be created or updated - if existing == nil { - // Create new protection - changes.Add(CategoryProtection, ChangeCreate, branch, describeProtection(wantProtection)) - if !dryRun { - if err := SetBranchProtection(repoFullName, branch, wantProtection); err != nil { - return changes, cli.Wrap(err, "failed to set protection for "+branch) - } - } - continue - } - - // Compare and check if update is needed - needsUpdate := false - details := make(map[string]string) - - // Check required reviews - existingReviews := 0 - existingDismissStale := false - existingCodeOwner := false - if existing.RequiredPullRequestReviews != nil { - existingReviews = existing.RequiredPullRequestReviews.RequiredApprovingReviewCount - existingDismissStale = existing.RequiredPullRequestReviews.DismissStaleReviews - existingCodeOwner = existing.RequiredPullRequestReviews.RequireCodeOwnerReviews - } - - if existingReviews != wantProtection.RequiredReviews { - needsUpdate = true - details["required_reviews"] = fmt.Sprintf("%d -> %d", existingReviews, wantProtection.RequiredReviews) - } - if existingDismissStale != wantProtection.DismissStale { - needsUpdate = true - details["dismiss_stale"] = 
fmt.Sprintf("%v -> %v", existingDismissStale, wantProtection.DismissStale) - } - if existingCodeOwner != wantProtection.RequireCodeOwnerReviews { - needsUpdate = true - details["code_owner_reviews"] = fmt.Sprintf("%v -> %v", existingCodeOwner, wantProtection.RequireCodeOwnerReviews) - } - - // Check enforce admins - existingEnforceAdmins := false - if existing.EnforceAdmins != nil { - existingEnforceAdmins = existing.EnforceAdmins.Enabled - } - if existingEnforceAdmins != wantProtection.EnforceAdmins { - needsUpdate = true - details["enforce_admins"] = fmt.Sprintf("%v -> %v", existingEnforceAdmins, wantProtection.EnforceAdmins) - } - - // Check linear history - existingLinear := false - if existing.RequiredLinearHistory != nil { - existingLinear = existing.RequiredLinearHistory.Enabled - } - if existingLinear != wantProtection.RequireLinearHistory { - needsUpdate = true - details["linear_history"] = fmt.Sprintf("%v -> %v", existingLinear, wantProtection.RequireLinearHistory) - } - - // Check force pushes - existingForcePush := false - if existing.AllowForcePushes != nil { - existingForcePush = existing.AllowForcePushes.Enabled - } - if existingForcePush != wantProtection.AllowForcePushes { - needsUpdate = true - details["allow_force_pushes"] = fmt.Sprintf("%v -> %v", existingForcePush, wantProtection.AllowForcePushes) - } - - // Check deletions - existingDeletions := false - if existing.AllowDeletions != nil { - existingDeletions = existing.AllowDeletions.Enabled - } - if existingDeletions != wantProtection.AllowDeletions { - needsUpdate = true - details["allow_deletions"] = fmt.Sprintf("%v -> %v", existingDeletions, wantProtection.AllowDeletions) - } - - // Check required status checks - var existingStatusChecks []string - if existing.RequiredStatusChecks != nil { - existingStatusChecks = existing.RequiredStatusChecks.Contexts - } - if !stringSliceEqual(existingStatusChecks, wantProtection.RequiredStatusChecks) { - needsUpdate = true - details["status_checks"] = 
fmt.Sprintf("%v -> %v", existingStatusChecks, wantProtection.RequiredStatusChecks) - } - - if needsUpdate { - changes.AddWithDetails(CategoryProtection, ChangeUpdate, branch, "", details) - if !dryRun { - if err := SetBranchProtection(repoFullName, branch, wantProtection); err != nil { - return changes, cli.Wrap(err, "failed to update protection for "+branch) - } - } - } else { - changes.Add(CategoryProtection, ChangeSkip, branch, "up to date") - } - } - - return changes, nil -} - -// describeProtection returns a human-readable description of protection rules. -func describeProtection(p BranchProtectionConfig) string { - var parts []string - if p.RequiredReviews > 0 { - parts = append(parts, fmt.Sprintf("%d review(s)", p.RequiredReviews)) - } - if p.DismissStale { - parts = append(parts, "dismiss stale") - } - if p.EnforceAdmins { - parts = append(parts, "enforce admins") - } - if len(parts) == 0 { - return "basic protection" - } - return strings.Join(parts, ", ") -} diff --git a/pkg/setup/github_security.go b/pkg/setup/github_security.go deleted file mode 100644 index 07c828c..0000000 --- a/pkg/setup/github_security.go +++ /dev/null @@ -1,281 +0,0 @@ -// github_security.go implements GitHub security settings synchronization. -// -// Uses the gh api command for security settings: -// - gh api repos/{owner}/{repo}/vulnerability-alerts --method GET (check if enabled) -// - gh api repos/{owner}/{repo}/vulnerability-alerts --method PUT (enable) -// - gh api repos/{owner}/{repo}/automated-security-fixes --method PUT (enable dependabot updates) -// - gh api repos/{owner}/{repo} --method PATCH (security_and_analysis settings) - -package setup - -import ( - "encoding/json" - "fmt" - "os/exec" - "strings" - - "github.com/host-uk/core/pkg/cli" -) - -// GitHubSecurityStatus represents the security settings status of a repository. 
-type GitHubSecurityStatus struct { - DependabotAlerts bool - DependabotSecurityUpdates bool - SecretScanning bool - SecretScanningPushProtection bool -} - -// GitHubRepoResponse contains security-related fields from repo API. -type GitHubRepoResponse struct { - SecurityAndAnalysis *SecurityAndAnalysis `json:"security_and_analysis"` -} - -// SecurityAndAnalysis contains security feature settings. -type SecurityAndAnalysis struct { - SecretScanning *SecurityFeature `json:"secret_scanning"` - SecretScanningPushProtection *SecurityFeature `json:"secret_scanning_push_protection"` - DependabotSecurityUpdates *SecurityFeature `json:"dependabot_security_updates"` -} - -// SecurityFeature represents a single security feature status. -type SecurityFeature struct { - Status string `json:"status"` // "enabled" or "disabled" -} - -// GetSecuritySettings fetches current security settings for a repository. -func GetSecuritySettings(repoFullName string) (*GitHubSecurityStatus, error) { - parts := strings.Split(repoFullName, "/") - if len(parts) != 2 { - return nil, fmt.Errorf("invalid repo format: %s", repoFullName) - } - - status := &GitHubSecurityStatus{} - - // Check Dependabot alerts (vulnerability alerts) - endpoint := fmt.Sprintf("repos/%s/%s/vulnerability-alerts", parts[0], parts[1]) - cmd := exec.Command("gh", "api", endpoint, "--method", "GET") - _, err := cmd.Output() - if err == nil { - status.DependabotAlerts = true - } else if exitErr, ok := err.(*exec.ExitError); ok { - stderr := string(exitErr.Stderr) - // 404 means alerts are disabled, 204 means enabled - if strings.Contains(stderr, "403") { - return nil, cli.Err("insufficient permissions to check security settings") - } - // Other errors (like 404) mean alerts are disabled - status.DependabotAlerts = false - } - - // Get repo security_and_analysis settings - endpoint = fmt.Sprintf("repos/%s/%s", parts[0], parts[1]) - cmd = exec.Command("gh", "api", endpoint) - output, err := cmd.Output() - if err != nil { - if 
exitErr, ok := err.(*exec.ExitError); ok { - return nil, cli.Err("%s", strings.TrimSpace(string(exitErr.Stderr))) - } - return nil, err - } - - var repo GitHubRepoResponse - if err := json.Unmarshal(output, &repo); err != nil { - return nil, err - } - - if repo.SecurityAndAnalysis != nil { - if repo.SecurityAndAnalysis.SecretScanning != nil { - status.SecretScanning = repo.SecurityAndAnalysis.SecretScanning.Status == "enabled" - } - if repo.SecurityAndAnalysis.SecretScanningPushProtection != nil { - status.SecretScanningPushProtection = repo.SecurityAndAnalysis.SecretScanningPushProtection.Status == "enabled" - } - if repo.SecurityAndAnalysis.DependabotSecurityUpdates != nil { - status.DependabotSecurityUpdates = repo.SecurityAndAnalysis.DependabotSecurityUpdates.Status == "enabled" - } - } - - return status, nil -} - -// EnableDependabotAlerts enables Dependabot vulnerability alerts. -func EnableDependabotAlerts(repoFullName string) error { - parts := strings.Split(repoFullName, "/") - if len(parts) != 2 { - return fmt.Errorf("invalid repo format: %s", repoFullName) - } - - endpoint := fmt.Sprintf("repos/%s/%s/vulnerability-alerts", parts[0], parts[1]) - cmd := exec.Command("gh", "api", endpoint, "--method", "PUT") - output, err := cmd.CombinedOutput() - if err != nil { - return cli.Err("%s", strings.TrimSpace(string(output))) - } - return nil -} - -// EnableDependabotSecurityUpdates enables automated Dependabot security updates. 
-func EnableDependabotSecurityUpdates(repoFullName string) error { - parts := strings.Split(repoFullName, "/") - if len(parts) != 2 { - return fmt.Errorf("invalid repo format: %s", repoFullName) - } - - endpoint := fmt.Sprintf("repos/%s/%s/automated-security-fixes", parts[0], parts[1]) - cmd := exec.Command("gh", "api", endpoint, "--method", "PUT") - output, err := cmd.CombinedOutput() - if err != nil { - return cli.Err("%s", strings.TrimSpace(string(output))) - } - return nil -} - -// DisableDependabotSecurityUpdates disables automated Dependabot security updates. -func DisableDependabotSecurityUpdates(repoFullName string) error { - parts := strings.Split(repoFullName, "/") - if len(parts) != 2 { - return fmt.Errorf("invalid repo format: %s", repoFullName) - } - - endpoint := fmt.Sprintf("repos/%s/%s/automated-security-fixes", parts[0], parts[1]) - cmd := exec.Command("gh", "api", endpoint, "--method", "DELETE") - output, err := cmd.CombinedOutput() - if err != nil { - return cli.Err("%s", strings.TrimSpace(string(output))) - } - return nil -} - -// UpdateSecurityAndAnalysis updates security_and_analysis settings. 
-func UpdateSecurityAndAnalysis(repoFullName string, secretScanning, pushProtection bool) error { - parts := strings.Split(repoFullName, "/") - if len(parts) != 2 { - return fmt.Errorf("invalid repo format: %s", repoFullName) - } - - // Build the payload - payload := map[string]interface{}{ - "security_and_analysis": map[string]interface{}{ - "secret_scanning": map[string]string{ - "status": boolToStatus(secretScanning), - }, - "secret_scanning_push_protection": map[string]string{ - "status": boolToStatus(pushProtection), - }, - }, - } - - payloadJSON, err := json.Marshal(payload) - if err != nil { - return err - } - - endpoint := fmt.Sprintf("repos/%s/%s", parts[0], parts[1]) - cmd := exec.Command("gh", "api", endpoint, "--method", "PATCH", "--input", "-") - cmd.Stdin = strings.NewReader(string(payloadJSON)) - output, err := cmd.CombinedOutput() - if err != nil { - errStr := strings.TrimSpace(string(output)) - // Some repos (private without GHAS) don't support these features - if strings.Contains(errStr, "secret scanning") || strings.Contains(errStr, "not available") { - return nil // Silently skip unsupported features - } - return cli.Err("%s", errStr) - } - return nil -} - -func boolToStatus(b bool) string { - if b { - return "enabled" - } - return "disabled" -} - -// SyncSecuritySettings synchronizes security settings for a repository. 
-func SyncSecuritySettings(repoFullName string, config *GitHubConfig, dryRun bool) (*ChangeSet, error) { - changes := NewChangeSet(repoFullName) - - // Get current settings - existing, err := GetSecuritySettings(repoFullName) - if err != nil { - // If permission denied, note it but don't fail - if strings.Contains(err.Error(), "insufficient permissions") { - changes.Add(CategorySecurity, ChangeSkip, "all", "insufficient permissions") - return changes, nil - } - return nil, cli.Wrap(err, "failed to get security settings") - } - - wantConfig := config.Security - - // Check Dependabot alerts - if wantConfig.DependabotAlerts && !existing.DependabotAlerts { - changes.Add(CategorySecurity, ChangeCreate, "dependabot_alerts", "enable") - if !dryRun { - if err := EnableDependabotAlerts(repoFullName); err != nil { - return changes, cli.Wrap(err, "failed to enable dependabot alerts") - } - } - } else if !wantConfig.DependabotAlerts && existing.DependabotAlerts { - changes.Add(CategorySecurity, ChangeSkip, "dependabot_alerts", "cannot disable via API") - } else { - changes.Add(CategorySecurity, ChangeSkip, "dependabot_alerts", "up to date") - } - - // Check Dependabot security updates - if wantConfig.DependabotSecurityUpdates && !existing.DependabotSecurityUpdates { - changes.Add(CategorySecurity, ChangeCreate, "dependabot_security_updates", "enable") - if !dryRun { - if err := EnableDependabotSecurityUpdates(repoFullName); err != nil { - // This might fail if alerts aren't enabled first - return changes, cli.Wrap(err, "failed to enable dependabot security updates") - } - } - } else if !wantConfig.DependabotSecurityUpdates && existing.DependabotSecurityUpdates { - changes.Add(CategorySecurity, ChangeDelete, "dependabot_security_updates", "disable") - if !dryRun { - if err := DisableDependabotSecurityUpdates(repoFullName); err != nil { - return changes, cli.Wrap(err, "failed to disable dependabot security updates") - } - } - } else { - changes.Add(CategorySecurity, ChangeSkip, 
"dependabot_security_updates", "up to date") - } - - // Check secret scanning and push protection - needsSecurityUpdate := false - if wantConfig.SecretScanning != existing.SecretScanning { - needsSecurityUpdate = true - if wantConfig.SecretScanning { - changes.Add(CategorySecurity, ChangeCreate, "secret_scanning", "enable") - } else { - changes.Add(CategorySecurity, ChangeDelete, "secret_scanning", "disable") - } - } else { - changes.Add(CategorySecurity, ChangeSkip, "secret_scanning", "up to date") - } - - if wantConfig.SecretScanningPushProtection != existing.SecretScanningPushProtection { - needsSecurityUpdate = true - if wantConfig.SecretScanningPushProtection { - changes.Add(CategorySecurity, ChangeCreate, "push_protection", "enable") - } else { - changes.Add(CategorySecurity, ChangeDelete, "push_protection", "disable") - } - } else { - changes.Add(CategorySecurity, ChangeSkip, "push_protection", "up to date") - } - - // Apply security_and_analysis changes - if needsSecurityUpdate && !dryRun { - if err := UpdateSecurityAndAnalysis(repoFullName, wantConfig.SecretScanning, wantConfig.SecretScanningPushProtection); err != nil { - // Don't fail on unsupported features - if !strings.Contains(err.Error(), "not available") { - return changes, cli.Wrap(err, "failed to update security settings") - } - } - } - - return changes, nil -} diff --git a/pkg/setup/github_webhooks.go b/pkg/setup/github_webhooks.go deleted file mode 100644 index 11d395d..0000000 --- a/pkg/setup/github_webhooks.go +++ /dev/null @@ -1,263 +0,0 @@ -// github_webhooks.go implements GitHub webhook synchronization. -// -// Uses the gh api command for webhook operations: -// - gh api repos/{owner}/{repo}/hooks --method GET -// - gh api repos/{owner}/{repo}/hooks --method POST - -package setup - -import ( - "encoding/json" - "fmt" - "os/exec" - "strings" - - "github.com/host-uk/core/pkg/cli" -) - -// GitHubWebhook represents a webhook as returned by the GitHub API. 
-type GitHubWebhook struct { - ID int `json:"id"` - Name string `json:"name"` - Active bool `json:"active"` - Events []string `json:"events"` - Config GitHubWebhookConfig `json:"config"` -} - -// GitHubWebhookConfig contains webhook configuration details. -type GitHubWebhookConfig struct { - URL string `json:"url"` - ContentType string `json:"content_type"` - InsecureSSL string `json:"insecure_ssl"` -} - -// ListWebhooks fetches all webhooks for a repository. -func ListWebhooks(repoFullName string) ([]GitHubWebhook, error) { - parts := strings.Split(repoFullName, "/") - if len(parts) != 2 { - return nil, fmt.Errorf("invalid repo format: %s", repoFullName) - } - - endpoint := fmt.Sprintf("repos/%s/%s/hooks", parts[0], parts[1]) - cmd := exec.Command("gh", "api", endpoint) - output, err := cmd.Output() - if err != nil { - if exitErr, ok := err.(*exec.ExitError); ok { - stderr := strings.TrimSpace(string(exitErr.Stderr)) - // Check for permission error - if strings.Contains(stderr, "Must have admin rights") || strings.Contains(stderr, "403") { - return nil, cli.Err("insufficient permissions to manage webhooks (requires admin)") - } - return nil, cli.Err("%s", stderr) - } - return nil, err - } - - var hooks []GitHubWebhook - if err := json.Unmarshal(output, &hooks); err != nil { - return nil, err - } - - return hooks, nil -} - -// CreateWebhook creates a new webhook in a repository. 
-func CreateWebhook(repoFullName string, name string, config WebhookConfig) error { - parts := strings.Split(repoFullName, "/") - if len(parts) != 2 { - return fmt.Errorf("invalid repo format: %s", repoFullName) - } - - // Build the webhook payload - payload := map[string]interface{}{ - "name": "web", - "active": true, - "events": config.Events, - "config": map[string]interface{}{ - "url": config.URL, - "content_type": config.ContentType, - "insecure_ssl": "0", - }, - } - - if config.Active != nil { - payload["active"] = *config.Active - } - - if config.Secret != "" { - configMap := payload["config"].(map[string]interface{}) - configMap["secret"] = config.Secret - } - - payloadJSON, err := json.Marshal(payload) - if err != nil { - return err - } - - endpoint := fmt.Sprintf("repos/%s/%s/hooks", parts[0], parts[1]) - cmd := exec.Command("gh", "api", endpoint, "--method", "POST", "--input", "-") - cmd.Stdin = strings.NewReader(string(payloadJSON)) - output, err := cmd.CombinedOutput() - if err != nil { - return cli.Err("%s", strings.TrimSpace(string(output))) - } - return nil -} - -// UpdateWebhook updates an existing webhook. 
-func UpdateWebhook(repoFullName string, hookID int, config WebhookConfig) error { - parts := strings.Split(repoFullName, "/") - if len(parts) != 2 { - return fmt.Errorf("invalid repo format: %s", repoFullName) - } - - payload := map[string]interface{}{ - "active": true, - "events": config.Events, - "config": map[string]interface{}{ - "url": config.URL, - "content_type": config.ContentType, - "insecure_ssl": "0", - }, - } - - if config.Active != nil { - payload["active"] = *config.Active - } - - if config.Secret != "" { - configMap := payload["config"].(map[string]interface{}) - configMap["secret"] = config.Secret - } - - payloadJSON, err := json.Marshal(payload) - if err != nil { - return err - } - - endpoint := fmt.Sprintf("repos/%s/%s/hooks/%d", parts[0], parts[1], hookID) - cmd := exec.Command("gh", "api", endpoint, "--method", "PATCH", "--input", "-") - cmd.Stdin = strings.NewReader(string(payloadJSON)) - output, err := cmd.CombinedOutput() - if err != nil { - return cli.Err("%s", strings.TrimSpace(string(output))) - } - return nil -} - -// SyncWebhooks synchronizes webhooks for a repository. -// Webhooks are matched by URL - if a webhook with the same URL exists, it's updated. -// Otherwise, a new webhook is created. 
-func SyncWebhooks(repoFullName string, config *GitHubConfig, dryRun bool) (*ChangeSet, error) { - changes := NewChangeSet(repoFullName) - - // Skip if no webhooks configured - if len(config.Webhooks) == 0 { - return changes, nil - } - - // Get existing webhooks - existing, err := ListWebhooks(repoFullName) - if err != nil { - // If permission denied, note it but don't fail entirely - if strings.Contains(err.Error(), "insufficient permissions") { - changes.Add(CategoryWebhook, ChangeSkip, "all", "insufficient permissions") - return changes, nil - } - return nil, cli.Wrap(err, "failed to list webhooks") - } - - // Build lookup map by URL - existingByURL := make(map[string]GitHubWebhook) - for _, hook := range existing { - existingByURL[hook.Config.URL] = hook - } - - // Process each configured webhook - for name, wantHook := range config.Webhooks { - // Skip webhooks with empty URLs (env var not set) - if wantHook.URL == "" { - changes.Add(CategoryWebhook, ChangeSkip, name, "URL not configured") - continue - } - - existingHook, exists := existingByURL[wantHook.URL] - - if !exists { - // Create new webhook - changes.Add(CategoryWebhook, ChangeCreate, name, wantHook.URL) - if !dryRun { - if err := CreateWebhook(repoFullName, name, wantHook); err != nil { - return changes, cli.Wrap(err, "failed to create webhook "+name) - } - } - continue - } - - // Check if update is needed - needsUpdate := false - details := make(map[string]string) - - // Check events - if !stringSliceEqual(existingHook.Events, wantHook.Events) { - needsUpdate = true - details["events"] = fmt.Sprintf("%v -> %v", existingHook.Events, wantHook.Events) - } - - // Check content type - if existingHook.Config.ContentType != wantHook.ContentType { - needsUpdate = true - details["content_type"] = fmt.Sprintf("%s -> %s", existingHook.Config.ContentType, wantHook.ContentType) - } - - // Check active state - wantActive := true - if wantHook.Active != nil { - wantActive = *wantHook.Active - } - if 
existingHook.Active != wantActive { - needsUpdate = true - details["active"] = fmt.Sprintf("%v -> %v", existingHook.Active, wantActive) - } - - if needsUpdate { - changes.AddWithDetails(CategoryWebhook, ChangeUpdate, name, "", details) - if !dryRun { - if err := UpdateWebhook(repoFullName, existingHook.ID, wantHook); err != nil { - return changes, cli.Wrap(err, "failed to update webhook "+name) - } - } - } else { - changes.Add(CategoryWebhook, ChangeSkip, name, "up to date") - } - } - - return changes, nil -} - -// stringSliceEqual compares two string slices for equality (order-independent). -// Uses frequency counting to properly handle duplicates. -func stringSliceEqual(a, b []string) bool { - if len(a) != len(b) { - return false - } - // Count frequencies in slice a - counts := make(map[string]int) - for _, s := range a { - counts[s]++ - } - // Decrement for each element in slice b - for _, s := range b { - counts[s]-- - if counts[s] < 0 { - return false - } - } - // All counts should be zero if slices are equal - for _, count := range counts { - if count != 0 { - return false - } - } - return true -} diff --git a/pkg/test/cmd_commands.go b/pkg/test/cmd_commands.go deleted file mode 100644 index 4cebd34..0000000 --- a/pkg/test/cmd_commands.go +++ /dev/null @@ -1,18 +0,0 @@ -// Package testcmd provides Go test running commands with enhanced output. -// -// Note: Package named testcmd to avoid conflict with Go's test package. 
-// -// Features: -// - Colour-coded pass/fail/skip output -// - Per-package coverage breakdown with --coverage -// - JSON output for CI/agents with --json -// - Filters linker warnings on macOS -// -// Flags: --verbose, --coverage, --short, --pkg, --run, --race, --json -package testcmd - -import "github.com/host-uk/core/pkg/cli" - -func init() { - cli.RegisterCommands(AddTestCommands) -} diff --git a/pkg/test/cmd_main.go b/pkg/test/cmd_main.go deleted file mode 100644 index 6b1ac5b..0000000 --- a/pkg/test/cmd_main.go +++ /dev/null @@ -1,58 +0,0 @@ -// Package testcmd provides test running commands. -// -// Note: Package named testcmd to avoid conflict with Go's test package. -package testcmd - -import ( - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/spf13/cobra" -) - -// Style aliases from shared -var ( - testHeaderStyle = cli.RepoStyle - testPassStyle = cli.SuccessStyle - testFailStyle = cli.ErrorStyle - testSkipStyle = cli.WarningStyle - testDimStyle = cli.DimStyle - testCovHighStyle = cli.NewStyle().Foreground(cli.ColourGreen500) - testCovMedStyle = cli.NewStyle().Foreground(cli.ColourAmber500) - testCovLowStyle = cli.NewStyle().Foreground(cli.ColourRed500) -) - -// Flag variables for test command -var ( - testVerbose bool - testCoverage bool - testShort bool - testPkg string - testRun string - testRace bool - testJSON bool -) - -var testCmd = &cobra.Command{ - Use: "test", - Short: i18n.T("cmd.test.short"), - Long: i18n.T("cmd.test.long"), - RunE: func(cmd *cobra.Command, args []string) error { - return runTest(testVerbose, testCoverage, testShort, testPkg, testRun, testRace, testJSON) - }, -} - -func initTestFlags() { - testCmd.Flags().BoolVar(&testVerbose, "verbose", false, i18n.T("cmd.test.flag.verbose")) - testCmd.Flags().BoolVar(&testCoverage, "coverage", false, i18n.T("common.flag.coverage")) - testCmd.Flags().BoolVar(&testShort, "short", false, i18n.T("cmd.test.flag.short")) - testCmd.Flags().StringVar(&testPkg, 
"pkg", "", i18n.T("cmd.test.flag.pkg")) - testCmd.Flags().StringVar(&testRun, "run", "", i18n.T("cmd.test.flag.run")) - testCmd.Flags().BoolVar(&testRace, "race", false, i18n.T("cmd.test.flag.race")) - testCmd.Flags().BoolVar(&testJSON, "json", false, i18n.T("cmd.test.flag.json")) -} - -// AddTestCommands registers the 'test' command and all subcommands. -func AddTestCommands(root *cobra.Command) { - initTestFlags() - root.AddCommand(testCmd) -} diff --git a/pkg/test/cmd_output.go b/pkg/test/cmd_output.go deleted file mode 100644 index 8532c1c..0000000 --- a/pkg/test/cmd_output.go +++ /dev/null @@ -1,204 +0,0 @@ -package testcmd - -import ( - "bufio" - "fmt" - "path/filepath" - "regexp" - "sort" - "strconv" - "strings" - - - "github.com/host-uk/core/pkg/i18n" -) - -type packageCoverage struct { - name string - coverage float64 - hasCov bool -} - -type testResults struct { - packages []packageCoverage - passed int - failed int - skipped int - totalCov float64 - covCount int - failedPkgs []string -} - -func parseTestOutput(output string) testResults { - results := testResults{} - - // Regex patterns - handle both timed and cached test results - // Example: ok github.com/host-uk/core/pkg/crypt 0.015s coverage: 91.2% of statements - // Example: ok github.com/host-uk/core/pkg/crypt (cached) coverage: 91.2% of statements - okPattern := regexp.MustCompile(`^ok\s+(\S+)\s+(?:[\d.]+s|\(cached\))(?:\s+coverage:\s+([\d.]+)%)?`) - failPattern := regexp.MustCompile(`^FAIL\s+(\S+)`) - skipPattern := regexp.MustCompile(`^\?\s+(\S+)\s+\[no test files\]`) - coverPattern := regexp.MustCompile(`coverage:\s+([\d.]+)%`) - - scanner := bufio.NewScanner(strings.NewReader(output)) - for scanner.Scan() { - line := scanner.Text() - - if matches := okPattern.FindStringSubmatch(line); matches != nil { - pkg := packageCoverage{name: matches[1]} - if len(matches) > 2 && matches[2] != "" { - cov, _ := strconv.ParseFloat(matches[2], 64) - pkg.coverage = cov - pkg.hasCov = true - results.totalCov 
+= cov - results.covCount++ - } - results.packages = append(results.packages, pkg) - results.passed++ - } else if matches := failPattern.FindStringSubmatch(line); matches != nil { - results.failed++ - results.failedPkgs = append(results.failedPkgs, matches[1]) - } else if matches := skipPattern.FindStringSubmatch(line); matches != nil { - results.skipped++ - } else if matches := coverPattern.FindStringSubmatch(line); matches != nil { - // Catch any additional coverage lines - cov, _ := strconv.ParseFloat(matches[1], 64) - if cov > 0 { - // Find the last package without coverage and update it - for i := len(results.packages) - 1; i >= 0; i-- { - if !results.packages[i].hasCov { - results.packages[i].coverage = cov - results.packages[i].hasCov = true - results.totalCov += cov - results.covCount++ - break - } - } - } - } - } - - return results -} - -func printTestSummary(results testResults, showCoverage bool) { - // Print pass/fail summary - total := results.passed + results.failed - if total > 0 { - fmt.Printf(" %s %s", testPassStyle.Render("✓"), i18n.T("i18n.count.passed", results.passed)) - if results.failed > 0 { - fmt.Printf(" %s %s", testFailStyle.Render("✗"), i18n.T("i18n.count.failed", results.failed)) - } - if results.skipped > 0 { - fmt.Printf(" %s %s", testSkipStyle.Render("○"), i18n.T("i18n.count.skipped", results.skipped)) - } - fmt.Println() - } - - // Print failed packages - if len(results.failedPkgs) > 0 { - fmt.Printf("\n %s\n", i18n.T("cmd.test.failed_packages")) - for _, pkg := range results.failedPkgs { - fmt.Printf(" %s %s\n", testFailStyle.Render("✗"), pkg) - } - } - - // Print coverage - if showCoverage { - printCoverageSummary(results) - } else if results.covCount > 0 { - avgCov := results.totalCov / float64(results.covCount) - fmt.Printf("\n %s %s\n", i18n.Label("coverage"), formatCoverage(avgCov)) - } -} - -func printCoverageSummary(results testResults) { - if len(results.packages) == 0 { - return - } - - fmt.Printf("\n %s\n", 
testHeaderStyle.Render(i18n.T("cmd.test.coverage_by_package"))) - - // Sort packages by name - sort.Slice(results.packages, func(i, j int) bool { - return results.packages[i].name < results.packages[j].name - }) - - // Find max package name length for alignment - maxLen := 0 - for _, pkg := range results.packages { - name := shortenPackageName(pkg.name) - if len(name) > maxLen { - maxLen = len(name) - } - } - - // Print each package - for _, pkg := range results.packages { - if !pkg.hasCov { - continue - } - name := shortenPackageName(pkg.name) - padding := strings.Repeat(" ", maxLen-len(name)+2) - fmt.Printf(" %s%s%s\n", name, padding, formatCoverage(pkg.coverage)) - } - - // Print average - if results.covCount > 0 { - avgCov := results.totalCov / float64(results.covCount) - avgLabel := i18n.T("cmd.test.label.average") - padding := strings.Repeat(" ", maxLen-len(avgLabel)+2) - fmt.Printf("\n %s%s%s\n", testHeaderStyle.Render(avgLabel), padding, formatCoverage(avgCov)) - } -} - -func formatCoverage(cov float64) string { - s := fmt.Sprintf("%.1f%%", cov) - if cov >= 80 { - return testCovHighStyle.Render(s) - } else if cov >= 50 { - return testCovMedStyle.Render(s) - } - return testCovLowStyle.Render(s) -} - -func shortenPackageName(name string) string { - // Remove common prefixes - prefixes := []string{ - "github.com/host-uk/core/", - "github.com/host-uk/", - } - for _, prefix := range prefixes { - if strings.HasPrefix(name, prefix) { - return strings.TrimPrefix(name, prefix) - } - } - return filepath.Base(name) -} - -func printJSONResults(results testResults, exitCode int) { - // Simple JSON output for agents - fmt.Printf("{\n") - fmt.Printf(" \"passed\": %d,\n", results.passed) - fmt.Printf(" \"failed\": %d,\n", results.failed) - fmt.Printf(" \"skipped\": %d,\n", results.skipped) - if results.covCount > 0 { - avgCov := results.totalCov / float64(results.covCount) - fmt.Printf(" \"coverage\": %.1f,\n", avgCov) - } - fmt.Printf(" \"exit_code\": %d,\n", exitCode) - 
if len(results.failedPkgs) > 0 { - fmt.Printf(" \"failed_packages\": [\n") - for i, pkg := range results.failedPkgs { - comma := "," - if i == len(results.failedPkgs)-1 { - comma = "" - } - fmt.Printf(" %q%s\n", pkg, comma) - } - fmt.Printf(" ]\n") - } else { - fmt.Printf(" \"failed_packages\": []\n") - } - fmt.Printf("}\n") -} diff --git a/pkg/test/cmd_runner.go b/pkg/test/cmd_runner.go deleted file mode 100644 index 027a59f..0000000 --- a/pkg/test/cmd_runner.go +++ /dev/null @@ -1,145 +0,0 @@ -package testcmd - -import ( - "bufio" - "errors" - "fmt" - "io" - "os" - "os/exec" - "runtime" - "strings" - - "github.com/host-uk/core/pkg/i18n" -) - -func runTest(verbose, coverage, short bool, pkg, run string, race, jsonOutput bool) error { - // Detect if we're in a Go project - if _, err := os.Stat("go.mod"); os.IsNotExist(err) { - return errors.New(i18n.T("cmd.test.error.no_go_mod")) - } - - // Build command arguments - args := []string{"test"} - - // Default to ./... if no package specified - if pkg == "" { - pkg = "./..." - } - - // Add flags - if verbose { - args = append(args, "-v") - } - if short { - args = append(args, "-short") - } - if run != "" { - args = append(args, "-run", run) - } - if race { - args = append(args, "-race") - } - - // Always add coverage - args = append(args, "-cover") - - // Add package pattern - args = append(args, pkg) - - // Create command - cmd := exec.Command("go", args...) 
- cmd.Dir, _ = os.Getwd() - - // Set environment to suppress macOS linker warnings - cmd.Env = append(os.Environ(), getMacOSDeploymentTarget()) - - if !jsonOutput { - fmt.Printf("%s %s\n", testHeaderStyle.Render(i18n.Label("test")), i18n.ProgressSubject("run", "tests")) - fmt.Printf(" %s %s\n", i18n.Label("package"), testDimStyle.Render(pkg)) - if run != "" { - fmt.Printf(" %s %s\n", i18n.Label("filter"), testDimStyle.Render(run)) - } - fmt.Println() - } - - // Capture output for parsing - var stdout, stderr strings.Builder - - if verbose && !jsonOutput { - // Stream output in verbose mode, but also capture for parsing - cmd.Stdout = io.MultiWriter(os.Stdout, &stdout) - cmd.Stderr = io.MultiWriter(os.Stderr, &stderr) - } else { - // Capture output for parsing - cmd.Stdout = &stdout - cmd.Stderr = &stderr - } - - err := cmd.Run() - exitCode := 0 - if err != nil { - if exitErr, ok := err.(*exec.ExitError); ok { - exitCode = exitErr.ExitCode() - } - } - - // Combine stdout and stderr for parsing, filtering linker warnings - combined := filterLinkerWarnings(stdout.String() + "\n" + stderr.String()) - - // Parse results - results := parseTestOutput(combined) - - if jsonOutput { - // JSON output for CI/agents - printJSONResults(results, exitCode) - if exitCode != 0 { - return errors.New(i18n.T("i18n.fail.run", "tests")) - } - return nil - } - - // Print summary - if !verbose { - printTestSummary(results, coverage) - } else if coverage { - // In verbose mode, still show coverage summary at end - fmt.Println() - printCoverageSummary(results) - } - - if exitCode != 0 { - fmt.Printf("\n%s %s\n", testFailStyle.Render(i18n.T("cli.fail")), i18n.T("cmd.test.tests_failed")) - return errors.New(i18n.T("i18n.fail.run", "tests")) - } - - fmt.Printf("\n%s %s\n", testPassStyle.Render(i18n.T("cli.pass")), i18n.T("common.result.all_passed")) - return nil -} - -func getMacOSDeploymentTarget() string { - if runtime.GOOS == "darwin" { - // Use deployment target matching current macOS to 
suppress linker warnings - return "MACOSX_DEPLOYMENT_TARGET=26.0" - } - return "" -} - -func filterLinkerWarnings(output string) string { - // Filter out ld: warning lines that pollute the output - var filtered []string - scanner := bufio.NewScanner(strings.NewReader(output)) - for scanner.Scan() { - line := scanner.Text() - // Skip linker warnings - if strings.HasPrefix(line, "ld: warning:") { - continue - } - // Skip test binary build comments - if strings.HasPrefix(line, "# ") && strings.HasSuffix(line, ".test") { - continue - } - filtered = append(filtered, line) - } - return strings.Join(filtered, "\n") -} diff --git a/pkg/updater/.github/workflows/ci.yml b/pkg/updater/.github/workflows/ci.yml deleted file mode 100644 index 5d8fadd..0000000 --- a/pkg/updater/.github/workflows/ci.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: CI -on: - push: - branches: - - main - pull_request: -jobs: - test: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v2 - - name: Set up Go - uses: actions/setup-go@v2 - with: - go-version: 1.25 - - name: Run Go Generate - run: go generate ./... - - name: Run Tests - run: go test -v -coverprofile=coverage.out ./... 
- - name: Upload to Codecov - uses: codecov/codecov-action@v2 - with: - files: ./coverage.out - fail_ci_if_error: false diff --git a/pkg/updater/.github/workflows/release.yml b/pkg/updater/.github/workflows/release.yml deleted file mode 100644 index 59ed57a..0000000 --- a/pkg/updater/.github/workflows/release.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: release -on: - push: - tags: - - 'v*' -jobs: - goreleaser: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v2 - with: - fetch-depth: 0 - - name: Set up Go - uses: actions/setup-go@v2 - with: - go-version: 1.18 - - name: Run GoReleaser - uses: goreleaser/goreleaser-action@v2 - with: - version: latest - args: release --rm-dist - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/pkg/updater/.gitignore b/pkg/updater/.gitignore deleted file mode 100644 index eddd022..0000000 --- a/pkg/updater/.gitignore +++ /dev/null @@ -1,19 +0,0 @@ -# Go -updater -version.go -*.exe -*.exe~ -*.dll -*.so -*.dylib -*.test -*.out -*.prof - -# Node -node_modules/ -dist/ -.DS_Store -npm-debug.log* -yarn-debug.log* -yarn-error.log* diff --git a/pkg/updater/LICENSE b/pkg/updater/LICENSE deleted file mode 100644 index 4153cd3..0000000 --- a/pkg/updater/LICENSE +++ /dev/null @@ -1,287 +0,0 @@ - EUROPEAN UNION PUBLIC LICENCE v. 1.2 - EUPL © the European Union 2007, 2016 - -This European Union Public Licence (the ‘EUPL’) applies to the Work (as defined -below) which is provided under the terms of this Licence. Any use of the Work, -other than as authorised under this Licence is prohibited (to the extent such -use is covered by a right of the copyright holder of the Work). - -The Work is provided under the terms of this Licence when the Licensor (as -defined below) has placed the following notice immediately following the -copyright notice for the Work: - - Licensed under the EUPL - -or has expressed by any other means his willingness to license under the EUPL. - -1. 
Definitions - -In this Licence, the following terms have the following meaning: - -- ‘The Licence’: this Licence. - -- ‘The Original Work’: the work or software distributed or communicated by the - Licensor under this Licence, available as Source Code and also as Executable - Code as the case may be. - -- ‘Derivative Works’: the works or software that could be created by the - Licensee, based upon the Original Work or modifications thereof. This Licence - does not define the extent of modification or dependence on the Original Work - required in order to classify a work as a Derivative Work; this extent is - determined by copyright law applicable in the country mentioned in Article 15. - -- ‘The Work’: the Original Work or its Derivative Works. - -- ‘The Source Code’: the human-readable form of the Work which is the most - convenient for people to study and modify. - -- ‘The Executable Code’: any code which has generally been compiled and which is - meant to be interpreted by a computer as a program. - -- ‘The Licensor’: the natural or legal person that distributes or communicates - the Work under the Licence. - -- ‘Contributor(s)’: any natural or legal person who modifies the Work under the - Licence, or otherwise contributes to the creation of a Derivative Work. - -- ‘The Licensee’ or ‘You’: any natural or legal person who makes any usage of - the Work under the terms of the Licence. - -- ‘Distribution’ or ‘Communication’: any act of selling, giving, lending, - renting, distributing, communicating, transmitting, or otherwise making - available, online or offline, copies of the Work or providing access to its - essential functionalities at the disposal of any other natural or legal - person. - -2. 
Scope of the rights granted by the Licence - -The Licensor hereby grants You a worldwide, royalty-free, non-exclusive, -sublicensable licence to do the following, for the duration of copyright vested -in the Original Work: - -- use the Work in any circumstance and for all usage, -- reproduce the Work, -- modify the Work, and make Derivative Works based upon the Work, -- communicate to the public, including the right to make available or display - the Work or copies thereof to the public and perform publicly, as the case may - be, the Work, -- distribute the Work or copies thereof, -- lend and rent the Work or copies thereof, -- sublicense rights in the Work or copies thereof. - -Those rights can be exercised on any media, supports and formats, whether now -known or later invented, as far as the applicable law permits so. - -In the countries where moral rights apply, the Licensor waives his right to -exercise his moral right to the extent allowed by law in order to make effective -the licence of the economic rights here above listed. - -The Licensor grants to the Licensee royalty-free, non-exclusive usage rights to -any patents held by the Licensor, to the extent necessary to make use of the -rights granted on the Work under this Licence. - -3. Communication of the Source Code - -The Licensor may provide the Work either in its Source Code form, or as -Executable Code. If the Work is provided as Executable Code, the Licensor -provides in addition a machine-readable copy of the Source Code of the Work -along with each copy of the Work that the Licensor distributes or indicates, in -a notice following the copyright notice attached to the Work, a repository where -the Source Code is easily and freely accessible for as long as the Licensor -continues to distribute or communicate the Work. - -4. 
Limitations on copyright - -Nothing in this Licence is intended to deprive the Licensee of the benefits from -any exception or limitation to the exclusive rights of the rights owners in the -Work, of the exhaustion of those rights or of other applicable limitations -thereto. - -5. Obligations of the Licensee - -The grant of the rights mentioned above is subject to some restrictions and -obligations imposed on the Licensee. Those obligations are the following: - -Attribution right: The Licensee shall keep intact all copyright, patent or -trademarks notices and all notices that refer to the Licence and to the -disclaimer of warranties. The Licensee must include a copy of such notices and a -copy of the Licence with every copy of the Work he/she distributes or -communicates. The Licensee must cause any Derivative Work to carry prominent -notices stating that the Work has been modified and the date of modification. - -Copyleft clause: If the Licensee distributes or communicates copies of the -Original Works or Derivative Works, this Distribution or Communication will be -done under the terms of this Licence or of a later version of this Licence -unless the Original Work is expressly distributed only under this version of the -Licence — for example by communicating ‘EUPL v. 1.2 only’. The Licensee -(becoming Licensor) cannot offer or impose any additional terms or conditions on -the Work or Derivative Work that alter or restrict the terms of the Licence. - -Compatibility clause: If the Licensee Distributes or Communicates Derivative -Works or copies thereof based upon both the Work and another work licensed under -a Compatible Licence, this Distribution or Communication can be done under the -terms of this Compatible Licence. For the sake of this clause, ‘Compatible -Licence’ refers to the licences listed in the appendix attached to this Licence. 
-Should the Licensee's obligations under the Compatible Licence conflict with -his/her obligations under this Licence, the obligations of the Compatible -Licence shall prevail. - -Provision of Source Code: When distributing or communicating copies of the Work, -the Licensee will provide a machine-readable copy of the Source Code or indicate -a repository where this Source will be easily and freely available for as long -as the Licensee continues to distribute or communicate the Work. - -Legal Protection: This Licence does not grant permission to use the trade names, -trademarks, service marks, or names of the Licensor, except as required for -reasonable and customary use in describing the origin of the Work and -reproducing the content of the copyright notice. - -6. Chain of Authorship - -The original Licensor warrants that the copyright in the Original Work granted -hereunder is owned by him/her or licensed to him/her and that he/she has the -power and authority to grant the Licence. - -Each Contributor warrants that the copyright in the modifications he/she brings -to the Work are owned by him/her or licensed to him/her and that he/she has the -power and authority to grant the Licence. - -Each time You accept the Licence, the original Licensor and subsequent -Contributors grant You a licence to their contributions to the Work, under the -terms of this Licence. - -7. Disclaimer of Warranty - -The Work is a work in progress, which is continuously improved by numerous -Contributors. It is not a finished work and may therefore contain defects or -‘bugs’ inherent to this type of development. - -For the above reason, the Work is provided under the Licence on an ‘as is’ basis -and without warranties of any kind concerning the Work, including without -limitation merchantability, fitness for a particular purpose, absence of defects -or errors, accuracy, non-infringement of intellectual property rights other than -copyright as stated in Article 6 of this Licence. 
- -This disclaimer of warranty is an essential part of the Licence and a condition -for the grant of any rights to the Work. - -8. Disclaimer of Liability - -Except in the cases of wilful misconduct or damages directly caused to natural -persons, the Licensor will in no event be liable for any direct or indirect, -material or moral, damages of any kind, arising out of the Licence or of the use -of the Work, including without limitation, damages for loss of goodwill, work -stoppage, computer failure or malfunction, loss of data or any commercial -damage, even if the Licensor has been advised of the possibility of such damage. -However, the Licensor will be liable under statutory product liability laws as -far such laws apply to the Work. - -9. Additional agreements - -While distributing the Work, You may choose to conclude an additional agreement, -defining obligations or services consistent with this Licence. However, if -accepting obligations, You may act only on your own behalf and on your sole -responsibility, not on behalf of the original Licensor or any other Contributor, -and only if You agree to indemnify, defend, and hold each Contributor harmless -for any liability incurred by, or claims asserted against such Contributor by -the fact You have accepted any warranty or additional liability. - -10. Acceptance of the Licence - -The provisions of this Licence can be accepted by clicking on an icon ‘I agree’ -placed under the bottom of a window displaying the text of this Licence or by -affirming consent in any other similar way, in accordance with the rules of -applicable law. Clicking on that icon indicates your clear and irrevocable -acceptance of this Licence and all of its terms and conditions. 
- -Similarly, you irrevocably accept this Licence and all of its terms and -conditions by exercising any rights granted to You by Article 2 of this Licence, -such as the use of the Work, the creation by You of a Derivative Work or the -Distribution or Communication by You of the Work or copies thereof. - -11. Information to the public - -In case of any Distribution or Communication of the Work by means of electronic -communication by You (for example, by offering to download the Work from a -remote location) the distribution channel or media (for example, a website) must -at least provide to the public the information requested by the applicable law -regarding the Licensor, the Licence and the way it may be accessible, concluded, -stored and reproduced by the Licensee. - -12. Termination of the Licence - -The Licence and the rights granted hereunder will terminate automatically upon -any breach by the Licensee of the terms of the Licence. - -Such a termination will not terminate the licences of any person who has -received the Work from the Licensee under the Licence, provided such persons -remain in full compliance with the Licence. - -13. Miscellaneous - -Without prejudice of Article 9 above, the Licence represents the complete -agreement between the Parties as to the Work. - -If any provision of the Licence is invalid or unenforceable under applicable -law, this will not affect the validity or enforceability of the Licence as a -whole. Such provision will be construed or reformed so as necessary to make it -valid and enforceable. - -The European Commission may publish other linguistic versions or new versions of -this Licence or updated versions of the Appendix, so far this is required and -reasonable, without reducing the scope of the rights granted by the Licence. New -versions of the Licence will be published with a unique version number. - -All linguistic versions of this Licence, approved by the European Commission, -have identical value. 
Parties can take advantage of the linguistic version of -their choice. - -14. Jurisdiction - -Without prejudice to specific agreement between parties, - -- any litigation resulting from the interpretation of this License, arising - between the European Union institutions, bodies, offices or agencies, as a - Licensor, and any Licensee, will be subject to the jurisdiction of the Court - of Justice of the European Union, as laid down in article 272 of the Treaty on - the Functioning of the European Union, - -- any litigation arising between other parties and resulting from the - interpretation of this License, will be subject to the exclusive jurisdiction - of the competent court where the Licensor resides or conducts its primary - business. - -15. Applicable Law - -Without prejudice to specific agreement between parties, - -- this Licence shall be governed by the law of the European Union Member State - where the Licensor has his seat, resides or has his registered office, - -- this licence shall be governed by Belgian law if the Licensor has no seat, - residence or registered office inside a European Union Member State. - -Appendix - -‘Compatible Licences’ according to Article 5 EUPL are: - -- GNU General Public License (GPL) v. 2, v. 3 -- GNU Affero General Public License (AGPL) v. 3 -- Open Software License (OSL) v. 2.1, v. 3.0 -- Eclipse Public License (EPL) v. 1.0 -- CeCILL v. 2.0, v. 2.1 -- Mozilla Public Licence (MPL) v. 2 -- GNU Lesser General Public Licence (LGPL) v. 2.1, v. 3 -- Creative Commons Attribution-ShareAlike v. 3.0 Unported (CC BY-SA 3.0) for - works other than software -- European Union Public Licence (EUPL) v. 1.1, v. 1.2 -- Québec Free and Open-Source Licence — Reciprocity (LiLiQ-R) or Strong - Reciprocity (LiLiQ-R+). 
- -The European Commission may update this Appendix to later versions of the above -licences without producing a new version of the EUPL, as long as they provide -the rights granted in Article 2 of this Licence and protect the covered Source -Code from exclusive appropriation. - -All other changes or additions to this Appendix require the production of a new -EUPL version. diff --git a/pkg/updater/Makefile b/pkg/updater/Makefile deleted file mode 100644 index d380a11..0000000 --- a/pkg/updater/Makefile +++ /dev/null @@ -1,40 +0,0 @@ -.PHONY: build dev release-local test coverage - -BINARY_NAME=updater -CMD_PATH=./cmd/updater - -# Default LDFLAGS to empty -LDFLAGS = "" - -# If VERSION is set, override LDFLAGS -ifdef VERSION - LDFLAGS = -ldflags "-X 'github.com/snider/updater.Version=$(VERSION)'" -endif - -.PHONY: generate -generate: - @echo "Generating code..." - @go generate ./... - -build: generate - @echo "Building $(BINARY_NAME)..." - @cd $(CMD_PATH) && go build $(LDFLAGS) - -dev: build - @echo "Running $(BINARY_NAME)..." - @$(CMD_PATH)/$(BINARY_NAME) --check-update - -release-local: - @echo "Running local release with GoReleaser..." - @~/go/bin/goreleaser release --snapshot --clean - -test: - @echo "Running tests..." - @go test ./... - -coverage: - @echo "Generating code coverage report..." - @go test -coverprofile=coverage.out ./... - @echo "Coverage report generated: coverage.out" - @echo "To view in browser: go tool cover -html=coverage.out" - @echo "To upload to Codecov, ensure you have the Codecov CLI installed (e.g., 'go install github.com/codecov/codecov-cli@latest') and run: codecov -f coverage.out" diff --git a/pkg/updater/README.md b/pkg/updater/README.md deleted file mode 100644 index d8c25e4..0000000 --- a/pkg/updater/README.md +++ /dev/null @@ -1,117 +0,0 @@ -# Core Element Template - -This repository is a template for developers to create custom HTML elements for the core web3 framework. 
It includes a Go backend, an Angular custom element, and a full release cycle configuration. - -## Getting Started - -1. **Clone the repository:** - ```bash - git clone https://github.com/your-username/core-element-template.git - ``` - -2. **Install the dependencies:** - ```bash - cd core-element-template - go mod tidy - cd ui - npm install - ``` - -3. **Run the development server:** - ```bash - go run ./cmd/demo-cli serve - ``` - This will start the Go backend and serve the Angular custom element. - -## Building the Custom Element - -To build the Angular custom element, run the following command: - -```bash -cd ui -npm run build -``` - -This will create a single JavaScript file in the `dist` directory that you can use in any HTML page. - -## Usage - -To use the updater library in your Go project, you can use the `UpdateService`. - -### GitHub-based Updates - -```go -package main - -import ( - "fmt" - "log" - - "github.com/snider/updater" -) - -func main() { - config := updater.UpdateServiceConfig{ - RepoURL: "https://github.com/owner/repo", - Channel: "stable", - CheckOnStartup: updater.CheckAndUpdateOnStartup, - } - - updateService, err := updater.NewUpdateService(config) - if err != nil { - log.Fatalf("Failed to create update service: %v", err) - } - - if err := updateService.Start(); err != nil { - fmt.Printf("Update check failed: %v\n", err) - } -} -``` - -### Generic HTTP Updates - -For updates from a generic HTTP server, the server should provide a `latest.json` file at the root of the `RepoURL`. 
The JSON file should have the following structure: - -```json -{ - "version": "1.2.3", - "url": "https://your-server.com/path/to/release-asset" -} -``` - -You can then configure the `UpdateService` as follows: - -```go -package main - -import ( - "fmt" - "log" - - "github.com/snider/updater" -) - -func main() { - config := updater.UpdateServiceConfig{ - RepoURL: "https://your-server.com", - CheckOnStartup: updater.CheckAndUpdateOnStartup, - } - - updateService, err := updater.NewUpdateService(config) - if err != nil { - log.Fatalf("Failed to create update service: %v", err) - } - - if err := updateService.Start(); err != nil { - fmt.Printf("Update check failed: %v\n", err) - } -} -``` - -## Contributing - -Contributions are welcome! Please feel free to submit a Pull Request. - -## License - -This project is licensed under the EUPL-1.2 License - see the [LICENSE](LICENSE) file for details. diff --git a/pkg/updater/build/main.go b/pkg/updater/build/main.go deleted file mode 100644 index 851ac13..0000000 --- a/pkg/updater/build/main.go +++ /dev/null @@ -1,36 +0,0 @@ -package main - -import ( - "encoding/json" - "fmt" - "io/ioutil" - "os" -) - -func main() { - // Read package.json - data, err := ioutil.ReadFile("package.json") - if err != nil { - fmt.Println("Error reading package.json, skipping version file generation.") - os.Exit(0) - } - - // Parse package.json - var pkg struct { - Version string `json:"version"` - } - if err := json.Unmarshal(data, &pkg); err != nil { - fmt.Println("Error parsing package.json, skipping version file generation.") - os.Exit(0) - } - - // Create the version file - content := fmt.Sprintf("package updater\n\n// Generated by go:generate. 
DO NOT EDIT.\n\nconst PkgVersion = %q\n", pkg.Version) - err = ioutil.WriteFile("version.go", []byte(content), 0644) - if err != nil { - fmt.Printf("Error writing version file: %v\n", err) - os.Exit(1) - } - - fmt.Println("Generated version.go with version:", pkg.Version) -} diff --git a/pkg/updater/docs/README.md b/pkg/updater/docs/README.md deleted file mode 100644 index b202692..0000000 --- a/pkg/updater/docs/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# Documentation - -Welcome to the documentation for the `updater` library. This library provides self-update functionality for Go applications, supporting both GitHub Releases and generic HTTP endpoints. - -## Contents - -* [Getting Started](getting-started.md): Installation and basic usage. -* [Configuration](configuration.md): Detailed configuration options for `UpdateService` and CLI flags. -* [Architecture](architecture.md): How the updater works, including GitHub integration and version comparison. diff --git a/pkg/updater/docs/architecture.md b/pkg/updater/docs/architecture.md deleted file mode 100644 index 59fe6d8..0000000 --- a/pkg/updater/docs/architecture.md +++ /dev/null @@ -1,53 +0,0 @@ -# Architecture - -The `updater` library is designed to facilitate self-updates for Go applications by replacing the running binary with a newer version downloaded from a remote source. - -## Update Mechanisms - -The library supports two primary update sources: - -1. **GitHub Releases:** Fetches releases directly from a GitHub repository. -2. **Generic HTTP:** Fetches update information from a generic HTTP endpoint. - -### GitHub Releases - -When configured with a GitHub repository URL (e.g., `https://github.com/owner/repo`), the updater uses the GitHub API to find releases. - -* **Channel Support:** You can specify a "channel" (e.g., "stable", "beta"). The updater will filter releases based on this channel. 
- * Ideally, this maps to release tags or pre-release status (though the specific implementation details of how "channel" maps to GitHub release types should be verified in the code). -* **Pull Request Updates:** The library supports updating to a specific pull request artifact, useful for testing pre-release builds. - -### Generic HTTP - -When configured with a generic HTTP URL, the updater expects the endpoint to return a JSON object describing the latest version. - -**Expected JSON Format:** - -```json -{ - "version": "1.2.3", - "url": "https://your-server.com/path/to/release-asset" -} -``` - -The updater compares the `version` from the JSON with the current application version. If the remote version is newer, it downloads the binary from the `url`. - -## Version Comparison - -The library uses Semantic Versioning (SemVer) to compare versions. - -* **Prefix Handling:** The `ForceSemVerPrefix` configuration option allows you to standardize version tags by enforcing a `v` prefix (e.g., `v1.0.0` vs `1.0.0`) for consistent comparison. -* **Logic:** - * If `Remote Version` > `Current Version`: Update available. - * If `Remote Version` <= `Current Version`: Up to date. - -## Self-Update Process - -The actual update process is handled by the `minio/selfupdate` library. - -1. **Download:** The new binary is downloaded from the source. -2. **Verification:** (Depending on configuration/implementation) Checksums may be verified. -3. **Apply:** The current executable file is replaced with the new binary. - * **Windows:** The old binary is renamed (often to `.old`) before replacement to allow the write operation. - * **Linux/macOS:** The file is unlinked and replaced. -4. **Restart:** The application usually needs to be restarted for the changes to take effect. The `updater` library currently handles the *replacement*, but the *restart* logic is typically left to the application. 
diff --git a/pkg/updater/docs/configuration.md b/pkg/updater/docs/configuration.md deleted file mode 100644 index e531834..0000000 --- a/pkg/updater/docs/configuration.md +++ /dev/null @@ -1,34 +0,0 @@ -# Configuration - -The `updater` library is highly configurable via the `UpdateServiceConfig` struct. - -## UpdateServiceConfig - -When creating a new `UpdateService`, you pass a `UpdateServiceConfig` struct. Here are the available fields: - -| Field | Type | Description | -| :--- | :--- | :--- | -| `RepoURL` | `string` | The URL to the repository for updates. Can be a GitHub repository URL (e.g., `https://github.com/owner/repo`) or a base URL for a generic HTTP update server. | -| `Channel` | `string` | Specifies the release channel to track (e.g., "stable", "prerelease"). This is **only used for GitHub-based updates**. | -| `CheckOnStartup` | `StartupCheckMode` | Determines the behavior when the service starts. See [Startup Modes](#startup-modes) below. | -| `ForceSemVerPrefix` | `bool` | Toggles whether to enforce a 'v' prefix on version tags for display and comparison. If `true`, a 'v' prefix is added if missing. | -| `ReleaseURLFormat` | `string` | A template for constructing the download URL for a release asset. The placeholder `{tag}` will be replaced with the release tag. | - -### Startup Modes - -The `CheckOnStartup` field can take one of the following values: - -* `updater.NoCheck`: Disables any checks on startup. -* `updater.CheckOnStartup`: Checks for updates on startup but does not apply them. -* `updater.CheckAndUpdateOnStartup`: Checks for and applies updates on startup. - -## CLI Flags - -If you are using the example CLI provided in `cmd/updater`, the following flags are available: - -* `--check-update`: Check for new updates without applying them. -* `--do-update`: Perform an update if available. -* `--channel`: Set the update channel (e.g., stable, beta, alpha). If not set, it's determined from the current version tag. 
-* `--force-semver-prefix`: Force 'v' prefix on semver tags (default `true`). -* `--release-url-format`: A URL format for release assets. -* `--pull-request`: Update to a specific pull request (integer ID). diff --git a/pkg/updater/docs/getting-started.md b/pkg/updater/docs/getting-started.md deleted file mode 100644 index 9b5349f..0000000 --- a/pkg/updater/docs/getting-started.md +++ /dev/null @@ -1,85 +0,0 @@ -# Getting Started - -This guide will help you integrate the `updater` library into your Go application. - -## Installation - -To install the library, run: - -```bash -go get github.com/snider/updater -``` - -## Basic Usage - -The `updater` library provides an `UpdateService` that simplifies the process of checking for and applying updates. - -### GitHub-based Updates - -If you are hosting your releases on GitHub, you can configure the service to check your repository. - -```go -package main - -import ( - "fmt" - "log" - - "github.com/snider/updater" -) - -func main() { - // Configure the update service - config := updater.UpdateServiceConfig{ - RepoURL: "https://github.com/your-username/your-repo", - Channel: "stable", // or "beta", "alpha", etc. - CheckOnStartup: updater.CheckAndUpdateOnStartup, - } - - // Create the service - updateService, err := updater.NewUpdateService(config) - if err != nil { - log.Fatalf("Failed to create update service: %v", err) - } - - // Start the service (checks for updates and applies them if configured) - if err := updateService.Start(); err != nil { - fmt.Printf("Update check/apply failed: %v\n", err) - } else { - fmt.Println("Update check completed.") - } -} -``` - -### Generic HTTP Updates - -If you are hosting your releases on a generic HTTP server, the server must provide a way to check for the latest version. 
- -```go -package main - -import ( - "fmt" - "log" - - "github.com/snider/updater" -) - -func main() { - config := updater.UpdateServiceConfig{ - RepoURL: "https://your-server.com/updates", - CheckOnStartup: updater.CheckOnStartup, // Check only, don't apply automatically - } - - updateService, err := updater.NewUpdateService(config) - if err != nil { - log.Fatalf("Failed to create update service: %v", err) - } - - if err := updateService.Start(); err != nil { - fmt.Printf("Update check failed: %v\n", err) - } -} -``` - -For Generic HTTP updates, the endpoint is expected to return a JSON object with `version` and `url` fields. See [Architecture](architecture.md) for more details. diff --git a/pkg/updater/generic_http.go b/pkg/updater/generic_http.go deleted file mode 100644 index 2161b1f..0000000 --- a/pkg/updater/generic_http.go +++ /dev/null @@ -1,55 +0,0 @@ -package updater - -import ( - "encoding/json" - "fmt" - "net/http" - "net/url" -) - -// GenericUpdateInfo holds the information from a latest.json file. -// This file is expected to be at the root of a generic HTTP update server. -type GenericUpdateInfo struct { - Version string `json:"version"` // The version number of the update. - URL string `json:"url"` // The URL to download the update from. -} - -// GetLatestUpdateFromURL fetches and parses a latest.json file from a base URL. -// The server at the baseURL should host a 'latest.json' file that contains -// the version and download URL for the latest update. 
-// -// Example of latest.json: -// -// { -// "version": "1.2.3", -// "url": "https://your-server.com/path/to/release-asset" -// } -func GetLatestUpdateFromURL(baseURL string) (*GenericUpdateInfo, error) { - u, err := url.Parse(baseURL) - if err != nil { - return nil, fmt.Errorf("invalid base URL: %w", err) - } - // Append latest.json to the path - u.Path += "/latest.json" - - resp, err := http.Get(u.String()) - if err != nil { - return nil, fmt.Errorf("failed to fetch latest.json: %w", err) - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return nil, fmt.Errorf("failed to fetch latest.json: status code %d", resp.StatusCode) - } - - var info GenericUpdateInfo - if err := json.NewDecoder(resp.Body).Decode(&info); err != nil { - return nil, fmt.Errorf("failed to parse latest.json: %w", err) - } - - if info.Version == "" || info.URL == "" { - return nil, fmt.Errorf("invalid latest.json content: version or url is missing") - } - - return &info, nil -} diff --git a/pkg/updater/generic_http_test.go b/pkg/updater/generic_http_test.go deleted file mode 100644 index bf51b48..0000000 --- a/pkg/updater/generic_http_test.go +++ /dev/null @@ -1,77 +0,0 @@ -package updater - -import ( - "fmt" - "net/http" - "net/http/httptest" - "testing" -) - -func TestGetLatestUpdateFromURL(t *testing.T) { - testCases := []struct { - name string - handler http.HandlerFunc - expectError bool - expectedVersion string - expectedURL string - }{ - { - name: "Valid latest.json", - handler: func(w http.ResponseWriter, r *http.Request) { - fmt.Fprintln(w, `{"version": "v1.1.0", "url": "http://example.com/release.zip"}`) - }, - expectedVersion: "v1.1.0", - expectedURL: "http://example.com/release.zip", - }, - { - name: "Invalid JSON", - handler: func(w http.ResponseWriter, r *http.Request) { - fmt.Fprintln(w, `{"version": "v1.1.0", "url": "http://example.com/release.zip"`) // Missing closing brace - }, - expectError: true, - }, - { - name: "Missing version", - handler: func(w 
http.ResponseWriter, r *http.Request) { - fmt.Fprintln(w, `{"url": "http://example.com/release.zip"}`) - }, - expectError: true, - }, - { - name: "Missing URL", - handler: func(w http.ResponseWriter, r *http.Request) { - fmt.Fprintln(w, `{"version": "v1.1.0"}`) - }, - expectError: true, - }, - { - name: "Server error", - handler: func(w http.ResponseWriter, r *http.Request) { - http.Error(w, "Internal Server Error", http.StatusInternalServerError) - }, - expectError: true, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - server := httptest.NewServer(tc.handler) - defer server.Close() - - info, err := GetLatestUpdateFromURL(server.URL) - - if (err != nil) != tc.expectError { - t.Errorf("Expected error: %v, got: %v", tc.expectError, err) - } - - if !tc.expectError { - if info.Version != tc.expectedVersion { - t.Errorf("Expected version: %s, got: %s", tc.expectedVersion, info.Version) - } - if info.URL != tc.expectedURL { - t.Errorf("Expected URL: %s, got: %s", tc.expectedURL, info.URL) - } - } - }) - } -} diff --git a/pkg/updater/github.go b/pkg/updater/github.go deleted file mode 100644 index 676720e..0000000 --- a/pkg/updater/github.go +++ /dev/null @@ -1,302 +0,0 @@ -package updater - -import ( - "context" - "encoding/json" - "fmt" - "net/http" - "os" - "runtime" - "strings" - - "golang.org/x/oauth2" -) - -// Repo represents a repository from the GitHub API. -type Repo struct { - CloneURL string `json:"clone_url"` // The URL to clone the repository. -} - -// ReleaseAsset represents a single asset from a GitHub release. -type ReleaseAsset struct { - Name string `json:"name"` // The name of the asset. - DownloadURL string `json:"browser_download_url"` // The URL to download the asset. -} - -// Release represents a GitHub release. -type Release struct { - TagName string `json:"tag_name"` // The name of the tag for the release. - PreRelease bool `json:"prerelease"` // Indicates if the release is a pre-release. 
- Assets []ReleaseAsset `json:"assets"` // A list of assets associated with the release. -} - -// GithubClient defines the interface for interacting with the GitHub API. -// This allows for mocking the client in tests. -type GithubClient interface { - // GetPublicRepos fetches the public repositories for a user or organization. - GetPublicRepos(ctx context.Context, userOrOrg string) ([]string, error) - // GetLatestRelease fetches the latest release for a given repository and channel. - GetLatestRelease(ctx context.Context, owner, repo, channel string) (*Release, error) - // GetReleaseByPullRequest fetches a release associated with a specific pull request number. - GetReleaseByPullRequest(ctx context.Context, owner, repo string, prNumber int) (*Release, error) -} - -type githubClient struct{} - -// NewAuthenticatedClient creates a new HTTP client that authenticates with the GitHub API. -// It uses the GITHUB_TOKEN environment variable for authentication. -// If the token is not set, it returns the default HTTP client. 
-var NewAuthenticatedClient = func(ctx context.Context) *http.Client { - token := os.Getenv("GITHUB_TOKEN") - if token == "" { - return http.DefaultClient - } - ts := oauth2.StaticTokenSource( - &oauth2.Token{AccessToken: token}, - ) - return oauth2.NewClient(ctx, ts) -} - -func (g *githubClient) GetPublicRepos(ctx context.Context, userOrOrg string) ([]string, error) { - return g.getPublicReposWithAPIURL(ctx, "https://api.github.com", userOrOrg) -} - -func (g *githubClient) getPublicReposWithAPIURL(ctx context.Context, apiURL, userOrOrg string) ([]string, error) { - client := NewAuthenticatedClient(ctx) - var allCloneURLs []string - url := fmt.Sprintf("%s/users/%s/repos", apiURL, userOrOrg) - - for { - if err := ctx.Err(); err != nil { - return nil, err - } - req, err := http.NewRequestWithContext(ctx, "GET", url, nil) - if err != nil { - return nil, err - } - req.Header.Set("User-Agent", "Borg-Data-Collector") - resp, err := client.Do(req) - if err != nil { - return nil, err - } - - if resp.StatusCode != http.StatusOK { - resp.Body.Close() - // Try organization endpoint - url = fmt.Sprintf("%s/orgs/%s/repos", apiURL, userOrOrg) - req, err = http.NewRequestWithContext(ctx, "GET", url, nil) - if err != nil { - return nil, err - } - req.Header.Set("User-Agent", "Borg-Data-Collector") - resp, err = client.Do(req) - if err != nil { - return nil, err - } - } - - if resp.StatusCode != http.StatusOK { - resp.Body.Close() - return nil, fmt.Errorf("failed to fetch repos: %s", resp.Status) - } - - var repos []Repo - if err := json.NewDecoder(resp.Body).Decode(&repos); err != nil { - resp.Body.Close() - return nil, err - } - resp.Body.Close() - - for _, repo := range repos { - allCloneURLs = append(allCloneURLs, repo.CloneURL) - } - - linkHeader := resp.Header.Get("Link") - if linkHeader == "" { - break - } - nextURL := g.findNextURL(linkHeader) - if nextURL == "" { - break - } - url = nextURL - } - - return allCloneURLs, nil -} - -func (g *githubClient) 
findNextURL(linkHeader string) string { - links := strings.Split(linkHeader, ",") - for _, link := range links { - parts := strings.Split(link, ";") - if len(parts) == 2 && strings.TrimSpace(parts[1]) == `rel="next"` { - return strings.Trim(strings.TrimSpace(parts[0]), "<>") - } - } - return "" -} - -// GetLatestRelease fetches the latest release for a given repository and channel. -// The channel can be "stable", "beta", or "alpha". -func (g *githubClient) GetLatestRelease(ctx context.Context, owner, repo, channel string) (*Release, error) { - client := NewAuthenticatedClient(ctx) - url := fmt.Sprintf("https://api.github.com/repos/%s/%s/releases", owner, repo) - - req, err := http.NewRequestWithContext(ctx, "GET", url, nil) - if err != nil { - return nil, err - } - req.Header.Set("User-Agent", "Borg-Data-Collector") - - resp, err := client.Do(req) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return nil, fmt.Errorf("failed to fetch releases: %s", resp.Status) - } - - var releases []Release - if err := json.NewDecoder(resp.Body).Decode(&releases); err != nil { - return nil, err - } - - return filterReleases(releases, channel), nil -} - -// filterReleases filters releases based on the specified channel. -func filterReleases(releases []Release, channel string) *Release { - for _, release := range releases { - releaseChannel := determineChannel(release.TagName, release.PreRelease) - if releaseChannel == channel { - return &release - } - } - return nil -} - -// determineChannel determines the stability channel of a release based on its tag and PreRelease flag. 
-func determineChannel(tagName string, isPreRelease bool) string { - tagLower := strings.ToLower(tagName) - if strings.Contains(tagLower, "alpha") { - return "alpha" - } - if strings.Contains(tagLower, "beta") { - return "beta" - } - if isPreRelease { // A pre-release without alpha/beta is treated as beta - return "beta" - } - return "stable" -} - -// GetReleaseByPullRequest fetches a release associated with a specific pull request number. -func (g *githubClient) GetReleaseByPullRequest(ctx context.Context, owner, repo string, prNumber int) (*Release, error) { - client := NewAuthenticatedClient(ctx) - url := fmt.Sprintf("https://api.github.com/repos/%s/%s/releases", owner, repo) - - req, err := http.NewRequestWithContext(ctx, "GET", url, nil) - if err != nil { - return nil, err - } - req.Header.Set("User-Agent", "Borg-Data-Collector") - - resp, err := client.Do(req) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return nil, fmt.Errorf("failed to fetch releases: %s", resp.Status) - } - - var releases []Release - if err := json.NewDecoder(resp.Body).Decode(&releases); err != nil { - return nil, err - } - - // The pr number is included in the tag name with the format `vX.Y.Z-alpha.pr.123` or `vX.Y.Z-beta.pr.123` - prTagSuffix := fmt.Sprintf(".pr.%d", prNumber) - for _, release := range releases { - if strings.Contains(release.TagName, prTagSuffix) { - return &release, nil - } - } - - return nil, nil // No release found for the given PR number -} - -// GetDownloadURL finds the appropriate download URL for the current operating system and architecture. -// -// It supports two modes of operation: -// 1. Using a 'releaseURLFormat' template: If 'releaseURLFormat' is provided, -// it will be used to construct the download URL. The template can contain -// placeholders for the release tag '{tag}', operating system '{os}', and -// architecture '{arch}'. -// 2. 
Automatic detection: If 'releaseURLFormat' is empty, the function will -// inspect the assets of the release to find a suitable download URL. It -// searches for an asset name that contains both the current OS and architecture -// (e.g., "my-app-linux-amd64"). If no match is found, it falls back to -// matching only the OS. -// -// Example with releaseURLFormat: -// -// release := &updater.Release{TagName: "v1.2.3"} -// url, err := updater.GetDownloadURL(release, "https://example.com/downloads/{tag}/{os}/{arch}") -// if err != nil { -// // handle error -// } -// fmt.Println(url) // "https://example.com/downloads/v1.2.3/linux/amd64" (on a Linux AMD64 system) -// -// Example with automatic detection: -// -// release := &updater.Release{ -// Assets: []updater.ReleaseAsset{ -// {Name: "my-app-linux-amd64", DownloadURL: "https://example.com/download/linux-amd64"}, -// {Name: "my-app-windows-amd64", DownloadURL: "https://example.com/download/windows-amd64"}, -// }, -// } -// url, err := updater.GetDownloadURL(release, "") -// if err != nil { -// // handle error -// } -// fmt.Println(url) // "https://example.com/download/linux-amd64" (on a Linux AMD64 system) -func GetDownloadURL(release *Release, releaseURLFormat string) (string, error) { - if release == nil { - return "", fmt.Errorf("no release provided") - } - - if releaseURLFormat != "" { - // Replace {tag}, {os}, and {arch} placeholders - r := strings.NewReplacer( - "{tag}", release.TagName, - "{os}", runtime.GOOS, - "{arch}", runtime.GOARCH, - ) - return r.Replace(releaseURLFormat), nil - } - - osName := runtime.GOOS - archName := runtime.GOARCH - - for _, asset := range release.Assets { - assetNameLower := strings.ToLower(asset.Name) - // Match asset that contains both OS and architecture - if strings.Contains(assetNameLower, osName) && strings.Contains(assetNameLower, archName) { - return asset.DownloadURL, nil - } - } - - // Fallback for OS only if no asset matched both OS and arch - for _, asset := range 
release.Assets { - assetNameLower := strings.ToLower(asset.Name) - if strings.Contains(assetNameLower, osName) { - return asset.DownloadURL, nil - } - } - - return "", fmt.Errorf("no suitable download asset found for %s/%s", osName, archName) -} diff --git a/pkg/updater/github_test.go b/pkg/updater/github_test.go deleted file mode 100644 index fdeb1a2..0000000 --- a/pkg/updater/github_test.go +++ /dev/null @@ -1,124 +0,0 @@ -package updater - -import ( - "bytes" - "context" - "io" - "net/http" - "net/url" - "testing" - - "github.com/Snider/Borg/pkg/mocks" -) - -func TestGetPublicRepos(t *testing.T) { - mockClient := mocks.NewMockClient(map[string]*http.Response{ - "https://api.github.com/users/testuser/repos": { - StatusCode: http.StatusOK, - Header: http.Header{"Content-Type": []string{"application/json"}}, - Body: io.NopCloser(bytes.NewBufferString(`[{"clone_url": "https://github.com/testuser/repo1.git"}]`)), - }, - "https://api.github.com/orgs/testorg/repos": { - StatusCode: http.StatusOK, - Header: http.Header{"Content-Type": []string{"application/json"}, "Link": []string{`; rel="next"`}}, - Body: io.NopCloser(bytes.NewBufferString(`[{"clone_url": "https://github.com/testorg/repo1.git"}]`)), - }, - "https://api.github.com/organizations/123/repos?page=2": { - StatusCode: http.StatusOK, - Header: http.Header{"Content-Type": []string{"application/json"}}, - Body: io.NopCloser(bytes.NewBufferString(`[{"clone_url": "https://github.com/testorg/repo2.git"}]`)), - }, - }) - - client := &githubClient{} - oldClient := NewAuthenticatedClient - NewAuthenticatedClient = func(ctx context.Context) *http.Client { - return mockClient - } - defer func() { - NewAuthenticatedClient = oldClient - }() - - // Test user repos - repos, err := client.getPublicReposWithAPIURL(context.Background(), "https://api.github.com", "testuser") - if err != nil { - t.Fatalf("getPublicReposWithAPIURL for user failed: %v", err) - } - if len(repos) != 1 || repos[0] != 
"https://github.com/testuser/repo1.git" { - t.Errorf("unexpected user repos: %v", repos) - } - - // Test org repos with pagination - repos, err = client.getPublicReposWithAPIURL(context.Background(), "https://api.github.com", "testorg") - if err != nil { - t.Fatalf("getPublicReposWithAPIURL for org failed: %v", err) - } - if len(repos) != 2 || repos[0] != "https://github.com/testorg/repo1.git" || repos[1] != "https://github.com/testorg/repo2.git" { - t.Errorf("unexpected org repos: %v", repos) - } -} -func TestGetPublicRepos_Error(t *testing.T) { - u, _ := url.Parse("https://api.github.com/users/testuser/repos") - mockClient := mocks.NewMockClient(map[string]*http.Response{ - "https://api.github.com/users/testuser/repos": { - StatusCode: http.StatusNotFound, - Status: "404 Not Found", - Header: http.Header{"Content-Type": []string{"application/json"}}, - Body: io.NopCloser(bytes.NewBufferString("")), - Request: &http.Request{Method: "GET", URL: u}, - }, - "https://api.github.com/orgs/testuser/repos": { - StatusCode: http.StatusNotFound, - Status: "404 Not Found", - Header: http.Header{"Content-Type": []string{"application/json"}}, - Body: io.NopCloser(bytes.NewBufferString("")), - Request: &http.Request{Method: "GET", URL: u}, - }, - }) - expectedErr := "failed to fetch repos: 404 Not Found" - - client := &githubClient{} - oldClient := NewAuthenticatedClient - NewAuthenticatedClient = func(ctx context.Context) *http.Client { - return mockClient - } - defer func() { - NewAuthenticatedClient = oldClient - }() - - // Test user repos - _, err := client.getPublicReposWithAPIURL(context.Background(), "https://api.github.com", "testuser") - if err.Error() != expectedErr { - t.Fatalf("getPublicReposWithAPIURL for user failed: %v", err) - } -} - -func TestFindNextURL(t *testing.T) { - client := &githubClient{} - linkHeader := `; rel="next", ; rel="prev"` - nextURL := client.findNextURL(linkHeader) - if nextURL != "https://api.github.com/organizations/123/repos?page=2" { - 
t.Errorf("unexpected next URL: %s", nextURL) - } - - linkHeader = `; rel="prev"` - nextURL = client.findNextURL(linkHeader) - if nextURL != "" { - t.Errorf("unexpected next URL: %s", nextURL) - } -} - -func TestNewAuthenticatedClient(t *testing.T) { - // Test with no token - client := NewAuthenticatedClient(context.Background()) - if client != http.DefaultClient { - t.Errorf("expected http.DefaultClient, but got something else") - } - - // Test with token - t.Setenv("GITHUB_TOKEN", "test-token") - client = NewAuthenticatedClient(context.Background()) - if client == http.DefaultClient { - t.Errorf("expected an authenticated client, but got http.DefaultClient") - } -} diff --git a/pkg/updater/mock_github_client_test.go b/pkg/updater/mock_github_client_test.go deleted file mode 100644 index 30db69d..0000000 --- a/pkg/updater/mock_github_client_test.go +++ /dev/null @@ -1,36 +0,0 @@ -package updater - -import ( - "context" -) - -// MockGithubClient is a mock implementation of the GithubClient interface for testing. -type MockGithubClient struct { - GetLatestReleaseFunc func(ctx context.Context, owner, repo, channel string) (*Release, error) - GetReleaseByPullRequestFunc func(ctx context.Context, owner, repo string, prNumber int) (*Release, error) - GetPublicReposFunc func(ctx context.Context, userOrOrg string) ([]string, error) -} - -// GetLatestRelease mocks the GetLatestRelease method of the GithubClient interface. -func (m *MockGithubClient) GetLatestRelease(ctx context.Context, owner, repo, channel string) (*Release, error) { - if m.GetLatestReleaseFunc != nil { - return m.GetLatestReleaseFunc(ctx, owner, repo, channel) - } - return nil, nil -} - -// GetReleaseByPullRequest mocks the GetReleaseByPullRequest method of the GithubClient interface. 
-func (m *MockGithubClient) GetReleaseByPullRequest(ctx context.Context, owner, repo string, prNumber int) (*Release, error) { - if m.GetReleaseByPullRequestFunc != nil { - return m.GetReleaseByPullRequestFunc(ctx, owner, repo, prNumber) - } - return nil, nil -} - -// GetPublicRepos mocks the GetPublicRepos method of the GithubClient interface. -func (m *MockGithubClient) GetPublicRepos(ctx context.Context, userOrOrg string) ([]string, error) { - if m.GetPublicReposFunc != nil { - return m.GetPublicReposFunc(ctx, userOrOrg) - } - return []string{"repo1", "repo2"}, nil -} diff --git a/pkg/updater/package.json b/pkg/updater/package.json deleted file mode 100644 index 55c42e4..0000000 --- a/pkg/updater/package.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name": "updater", - "version": "1.2.3" -} diff --git a/pkg/updater/service.go b/pkg/updater/service.go deleted file mode 100644 index 4c57066..0000000 --- a/pkg/updater/service.go +++ /dev/null @@ -1,127 +0,0 @@ -//go:generate go run github.com/host-uk/core/pkg/updater/build - -// Package updater provides functionality for self-updating Go applications. -// It supports updates from GitHub releases and generic HTTP endpoints. -package updater - -import ( - "fmt" - "net/url" - "strings" -) - -// StartupCheckMode defines the updater's behavior on startup. -type StartupCheckMode int - -const ( - // NoCheck disables any checks on startup. - NoCheck StartupCheckMode = iota - // CheckOnStartup checks for updates on startup but does not apply them. - CheckOnStartup - // CheckAndUpdateOnStartup checks for and applies updates on startup. - CheckAndUpdateOnStartup -) - -// UpdateServiceConfig holds the configuration for the UpdateService. -type UpdateServiceConfig struct { - // RepoURL is the URL to the repository for updates. It can be a GitHub - // repository URL (e.g., "https://github.com/owner/repo") or a base URL - // for a generic HTTP update server. 
- RepoURL string - // Channel specifies the release channel to track (e.g., "stable", "prerelease"). - // This is only used for GitHub-based updates. - Channel string - // CheckOnStartup determines the update behavior when the service starts. - CheckOnStartup StartupCheckMode - // ForceSemVerPrefix toggles whether to enforce a 'v' prefix on version tags for display. - // If true, a 'v' prefix is added if missing. If false, it's removed if present. - ForceSemVerPrefix bool - // ReleaseURLFormat provides a template for constructing the download URL for a - // release asset. The placeholder {tag} will be replaced with the release tag. - ReleaseURLFormat string -} - -// UpdateService provides a configurable interface for handling application updates. -// It can be configured to check for updates on startup and, if desired, apply -// them automatically. The service can handle updates from both GitHub releases -// and generic HTTP servers. -type UpdateService struct { - config UpdateServiceConfig - isGitHub bool - owner string - repo string -} - -// NewUpdateService creates and configures a new UpdateService. -// It parses the repository URL to determine if it's a GitHub repository -// and extracts the owner and repo name. -func NewUpdateService(config UpdateServiceConfig) (*UpdateService, error) { - isGitHub := strings.Contains(config.RepoURL, "github.com") - var owner, repo string - var err error - - if isGitHub { - owner, repo, err = ParseRepoURL(config.RepoURL) - if err != nil { - return nil, fmt.Errorf("failed to parse GitHub repo URL: %w", err) - } - } - - return &UpdateService{ - config: config, - isGitHub: isGitHub, - owner: owner, - repo: repo, - }, nil -} - -// Start initiates the update check based on the service configuration. -// It determines whether to perform a GitHub or HTTP-based update check -// based on the RepoURL. The behavior of the check is controlled by the -// CheckOnStartup setting in the configuration. 
-func (s *UpdateService) Start() error { - if s.isGitHub { - return s.startGitHubCheck() - } - return s.startHTTPCheck() -} - -func (s *UpdateService) startGitHubCheck() error { - switch s.config.CheckOnStartup { - case NoCheck: - return nil // Do nothing - case CheckOnStartup: - return CheckOnly(s.owner, s.repo, s.config.Channel, s.config.ForceSemVerPrefix, s.config.ReleaseURLFormat) - case CheckAndUpdateOnStartup: - return CheckForUpdates(s.owner, s.repo, s.config.Channel, s.config.ForceSemVerPrefix, s.config.ReleaseURLFormat) - default: - return fmt.Errorf("unknown startup check mode: %d", s.config.CheckOnStartup) - } -} - -func (s *UpdateService) startHTTPCheck() error { - switch s.config.CheckOnStartup { - case NoCheck: - return nil // Do nothing - case CheckOnStartup: - return CheckOnlyHTTP(s.config.RepoURL) - case CheckAndUpdateOnStartup: - return CheckForUpdatesHTTP(s.config.RepoURL) - default: - return fmt.Errorf("unknown startup check mode: %d", s.config.CheckOnStartup) - } -} - -// ParseRepoURL extracts the owner and repository name from a GitHub URL. -// It handles standard GitHub URL formats. 
-func ParseRepoURL(repoURL string) (owner string, repo string, err error) { - u, err := url.Parse(repoURL) - if err != nil { - return "", "", err - } - parts := strings.Split(strings.Trim(u.Path, "/"), "/") - if len(parts) < 2 { - return "", "", fmt.Errorf("invalid repo URL path: %s", u.Path) - } - return parts[0], parts[1], nil -} diff --git a/pkg/updater/service_examples_test.go b/pkg/updater/service_examples_test.go deleted file mode 100644 index 542697a..0000000 --- a/pkg/updater/service_examples_test.go +++ /dev/null @@ -1,42 +0,0 @@ -package updater_test - -import ( - "fmt" - "log" - - "github.com/host-uk/core/pkg/updater" -) - -func ExampleNewUpdateService() { - // Mock the update check functions to prevent actual updates during tests - updater.CheckForUpdates = func(owner, repo, channel string, forceSemVerPrefix bool, releaseURLFormat string) error { - fmt.Println("CheckForUpdates called") - return nil - } - defer func() { - updater.CheckForUpdates = nil // Restore original function - }() - - config := updater.UpdateServiceConfig{ - RepoURL: "https://github.com/owner/repo", - Channel: "stable", - CheckOnStartup: updater.CheckAndUpdateOnStartup, - } - updateService, err := updater.NewUpdateService(config) - if err != nil { - log.Fatalf("Failed to create update service: %v", err) - } - if err := updateService.Start(); err != nil { - log.Printf("Update check failed: %v", err) - } - // Output: CheckForUpdates called -} - -func ExampleParseRepoURL() { - owner, repo, err := updater.ParseRepoURL("https://github.com/owner/repo") - if err != nil { - log.Fatalf("Failed to parse repo URL: %v", err) - } - fmt.Printf("Owner: %s, Repo: %s", owner, repo) - // Output: Owner: owner, Repo: repo -} diff --git a/pkg/updater/service_test.go b/pkg/updater/service_test.go deleted file mode 100644 index 5f12b3b..0000000 --- a/pkg/updater/service_test.go +++ /dev/null @@ -1,170 +0,0 @@ -package updater - -import ( - "net/http" - "net/http/httptest" - "testing" -) - -func 
TestNewUpdateService(t *testing.T) { - testCases := []struct { - name string - config UpdateServiceConfig - expectError bool - isGitHub bool - }{ - { - name: "Valid GitHub URL", - config: UpdateServiceConfig{ - RepoURL: "https://github.com/owner/repo", - }, - isGitHub: true, - }, - { - name: "Valid non-GitHub URL", - config: UpdateServiceConfig{ - RepoURL: "https://example.com/updates", - }, - isGitHub: false, - }, - { - name: "Invalid GitHub URL", - config: UpdateServiceConfig{ - RepoURL: "https://github.com/owner", - }, - expectError: true, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - service, err := NewUpdateService(tc.config) - if (err != nil) != tc.expectError { - t.Errorf("Expected error: %v, got: %v", tc.expectError, err) - } - if err == nil && service.isGitHub != tc.isGitHub { - t.Errorf("Expected isGitHub: %v, got: %v", tc.isGitHub, service.isGitHub) - } - }) - } -} - -func TestUpdateService_Start(t *testing.T) { - // Setup a mock server for HTTP tests - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Write([]byte(`{"version": "v1.1.0", "url": "http://example.com/release.zip"}`)) - })) - defer server.Close() - - testCases := []struct { - name string - config UpdateServiceConfig - checkOnlyGitHub int - checkAndDoGitHub int - checkOnlyHTTPCalls int - checkAndDoHTTPCalls int - expectError bool - }{ - { - name: "GitHub: NoCheck", - config: UpdateServiceConfig{ - RepoURL: "https://github.com/owner/repo", - CheckOnStartup: NoCheck, - }, - }, - { - name: "GitHub: CheckOnStartup", - config: UpdateServiceConfig{ - RepoURL: "https://github.com/owner/repo", - CheckOnStartup: CheckOnStartup, - }, - checkOnlyGitHub: 1, - }, - { - name: "GitHub: CheckAndUpdateOnStartup", - config: UpdateServiceConfig{ - RepoURL: "https://github.com/owner/repo", - CheckOnStartup: CheckAndUpdateOnStartup, - }, - checkAndDoGitHub: 1, - }, - { - name: "HTTP: NoCheck", - config: UpdateServiceConfig{ - 
RepoURL: server.URL, - CheckOnStartup: NoCheck, - }, - }, - { - name: "HTTP: CheckOnStartup", - config: UpdateServiceConfig{ - RepoURL: server.URL, - CheckOnStartup: CheckOnStartup, - }, - checkOnlyHTTPCalls: 1, - }, - { - name: "HTTP: CheckAndUpdateOnStartup", - config: UpdateServiceConfig{ - RepoURL: server.URL, - CheckOnStartup: CheckAndUpdateOnStartup, - }, - checkAndDoHTTPCalls: 1, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - var checkOnlyGitHub, checkAndDoGitHub, checkOnlyHTTP, checkAndDoHTTP int - - // Mock GitHub functions - originalCheckOnly := CheckOnly - CheckOnly = func(owner, repo, channel string, forceSemVerPrefix bool, releaseURLFormat string) error { - checkOnlyGitHub++ - return nil - } - defer func() { CheckOnly = originalCheckOnly }() - - originalCheckForUpdates := CheckForUpdates - CheckForUpdates = func(owner, repo, channel string, forceSemVerPrefix bool, releaseURLFormat string) error { - checkAndDoGitHub++ - return nil - } - defer func() { CheckForUpdates = originalCheckForUpdates }() - - // Mock HTTP functions - originalCheckOnlyHTTP := CheckOnlyHTTP - CheckOnlyHTTP = func(baseURL string) error { - checkOnlyHTTP++ - return nil - } - defer func() { CheckOnlyHTTP = originalCheckOnlyHTTP }() - - originalCheckForUpdatesHTTP := CheckForUpdatesHTTP - CheckForUpdatesHTTP = func(baseURL string) error { - checkAndDoHTTP++ - return nil - } - defer func() { CheckForUpdatesHTTP = originalCheckForUpdatesHTTP }() - - service, _ := NewUpdateService(tc.config) - err := service.Start() - - if (err != nil) != tc.expectError { - t.Errorf("Expected error: %v, got: %v", tc.expectError, err) - } - if checkOnlyGitHub != tc.checkOnlyGitHub { - t.Errorf("Expected GitHub CheckOnly calls: %d, got: %d", tc.checkOnlyGitHub, checkOnlyGitHub) - } - if checkAndDoGitHub != tc.checkAndDoGitHub { - t.Errorf("Expected GitHub CheckForUpdates calls: %d, got: %d", tc.checkAndDoGitHub, checkAndDoGitHub) - } - if checkOnlyHTTP != 
tc.checkOnlyHTTPCalls { - t.Errorf("Expected HTTP CheckOnly calls: %d, got: %d", tc.checkOnlyHTTPCalls, checkOnlyHTTP) - } - if checkAndDoHTTP != tc.checkAndDoHTTPCalls { - t.Errorf("Expected HTTP CheckForUpdates calls: %d, got: %d", tc.checkAndDoHTTPCalls, checkAndDoHTTP) - } - }) - } -} diff --git a/pkg/updater/tests.patch b/pkg/updater/tests.patch deleted file mode 100644 index e69de29..0000000 diff --git a/pkg/updater/ui/.editorconfig b/pkg/updater/ui/.editorconfig deleted file mode 100644 index f166060..0000000 --- a/pkg/updater/ui/.editorconfig +++ /dev/null @@ -1,17 +0,0 @@ -# Editor configuration, see https://editorconfig.org -root = true - -[*] -charset = utf-8 -indent_style = space -indent_size = 2 -insert_final_newline = true -trim_trailing_whitespace = true - -[*.ts] -quote_type = single -ij_typescript_use_double_quotes = false - -[*.md] -max_line_length = off -trim_trailing_whitespace = false diff --git a/pkg/updater/ui/.gitignore b/pkg/updater/ui/.gitignore deleted file mode 100644 index b1d225e..0000000 --- a/pkg/updater/ui/.gitignore +++ /dev/null @@ -1,43 +0,0 @@ -# See https://docs.github.com/get-started/getting-started-with-git/ignoring-files for more about ignoring files. 
- -# Compiled output -/dist -/tmp -/out-tsc -/bazel-out - -# Node -/node_modules -npm-debug.log -yarn-error.log - -# IDEs and editors -.idea/ -.project -.classpath -.c9/ -*.launch -.settings/ -*.sublime-workspace - -# Visual Studio Code -.vscode/* -!.vscode/settings.json -!.vscode/tasks.json -!.vscode/launch.json -!.vscode/extensions.json -.history/* - -# Miscellaneous -/.angular/cache -.sass-cache/ -/connect.lock -/coverage -/libpeerconnection.log -testem.log -/typings -__screenshots__/ - -# System files -.DS_Store -Thumbs.db diff --git a/pkg/updater/ui/.vscode/extensions.json b/pkg/updater/ui/.vscode/extensions.json deleted file mode 100644 index 77b3745..0000000 --- a/pkg/updater/ui/.vscode/extensions.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - // For more information, visit: https://go.microsoft.com/fwlink/?linkid=827846 - "recommendations": ["angular.ng-template"] -} diff --git a/pkg/updater/ui/.vscode/launch.json b/pkg/updater/ui/.vscode/launch.json deleted file mode 100644 index 925af83..0000000 --- a/pkg/updater/ui/.vscode/launch.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 - "version": "0.2.0", - "configurations": [ - { - "name": "ng serve", - "type": "chrome", - "request": "launch", - "preLaunchTask": "npm: start", - "url": "http://localhost:4200/" - }, - { - "name": "ng test", - "type": "chrome", - "request": "launch", - "preLaunchTask": "npm: test", - "url": "http://localhost:9876/debug.html" - } - ] -} diff --git a/pkg/updater/ui/.vscode/tasks.json b/pkg/updater/ui/.vscode/tasks.json deleted file mode 100644 index a298b5b..0000000 --- a/pkg/updater/ui/.vscode/tasks.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - // For more information, visit: https://go.microsoft.com/fwlink/?LinkId=733558 - "version": "2.0.0", - "tasks": [ - { - "type": "npm", - "script": "start", - "isBackground": true, - "problemMatcher": { - "owner": "typescript", - "pattern": "$tsc", - "background": { - 
"activeOnStart": true, - "beginsPattern": { - "regexp": "(.*?)" - }, - "endsPattern": { - "regexp": "bundle generation complete" - } - } - } - }, - { - "type": "npm", - "script": "test", - "isBackground": true, - "problemMatcher": { - "owner": "typescript", - "pattern": "$tsc", - "background": { - "activeOnStart": true, - "beginsPattern": { - "regexp": "(.*?)" - }, - "endsPattern": { - "regexp": "bundle generation complete" - } - } - } - } - ] -} diff --git a/pkg/updater/ui/README.md b/pkg/updater/ui/README.md deleted file mode 100644 index 4e1bfe1..0000000 --- a/pkg/updater/ui/README.md +++ /dev/null @@ -1,59 +0,0 @@ -# CoreElementTemplate - -This project was generated using [Angular CLI](https://github.com/angular/angular-cli) version 20.3.9. - -## Development server - -To start a local development server, run: - -```bash -ng serve -``` - -Once the server is running, open your browser and navigate to `http://localhost:4200/`. The application will automatically reload whenever you modify any of the source files. - -## Code scaffolding - -Angular CLI includes powerful code scaffolding tools. To generate a new component, run: - -```bash -ng generate component component-name -``` - -For a complete list of available schematics (such as `components`, `directives`, or `pipes`), run: - -```bash -ng generate --help -``` - -## Building - -To build the project run: - -```bash -ng build -``` - -This will compile your project and store the build artifacts in the `dist/` directory. By default, the production build optimizes your application for performance and speed. - -## Running unit tests - -To execute unit tests with the [Karma](https://karma-runner.github.io) test runner, use the following command: - -```bash -ng test -``` - -## Running end-to-end tests - -For end-to-end (e2e) testing, run: - -```bash -ng e2e -``` - -Angular CLI does not come with an end-to-end testing framework by default. You can choose one that suits your needs. 
- -## Additional Resources - -For more information on using the Angular CLI, including detailed command references, visit the [Angular CLI Overview and Command Reference](https://angular.dev/tools/cli) page. diff --git a/pkg/updater/ui/angular.json b/pkg/updater/ui/angular.json deleted file mode 100644 index b0b85e7..0000000 --- a/pkg/updater/ui/angular.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "$schema": "./node_modules/@angular/cli/lib/config/schema.json", - "version": 1, - "newProjectRoot": "projects", - "projects": { - "core-element-template": { - "projectType": "application", - "schematics": { - "@schematics/angular:component": { - "standalone": false - }, - "@schematics/angular:directive": { - "standalone": false - }, - "@schematics/angular:pipe": { - "standalone": false - } - }, - "root": "", - "sourceRoot": "src", - "prefix": "app", - "architect": { - "build": { - "builder": "@angular/build:application", - "options": { - "browser": "src/main.ts", - "polyfills": [ - "zone.js" - ], - "tsConfig": "tsconfig.app.json", - "assets": [ - { - "glob": "**/*", - "input": "public" - } - ], - "styles": [] - }, - "configurations": { - "production": { - "budgets": [ - { - "type": "initial", - "maximumWarning": "500kB", - "maximumError": "1MB" - }, - { - "type": "anyComponentStyle", - "maximumWarning": "4kB", - "maximumError": "8kB" - } - ], - "outputHashing": "none" - }, - "development": { - "optimization": false, - "extractLicenses": false, - "sourceMap": true - } - }, - "defaultConfiguration": "production" - }, - "serve": { - "builder": "@angular/build:dev-server", - "configurations": { - "production": { - "buildTarget": "core-element-template:build:production" - }, - "development": { - "buildTarget": "core-element-template:build:development" - } - }, - "defaultConfiguration": "development" - }, - "extract-i18n": { - "builder": "@angular/build:extract-i18n" - } - } - } - } -} diff --git a/pkg/updater/ui/package-lock.json b/pkg/updater/ui/package-lock.json deleted file 
mode 100644 index 5fc1044..0000000 --- a/pkg/updater/ui/package-lock.json +++ /dev/null @@ -1,9696 +0,0 @@ -{ - "name": "core-element-template", - "version": "0.0.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "core-element-template", - "version": "0.0.0", - "dependencies": { - "@angular/common": "^20.3.0", - "@angular/compiler": "^20.3.0", - "@angular/core": "^20.3.0", - "@angular/elements": "^20.3.10", - "@angular/forms": "^20.3.0", - "@angular/platform-browser": "^20.3.0", - "@angular/router": "^20.3.0", - "rxjs": "~7.8.0", - "tslib": "^2.3.0", - "zone.js": "~0.15.0" - }, - "devDependencies": { - "@angular/build": "^20.3.9", - "@angular/cli": "^20.3.9", - "@angular/compiler-cli": "^20.3.0", - "@types/jasmine": "~5.1.0", - "jasmine-core": "~5.9.0", - "karma": "~6.4.0", - "karma-chrome-launcher": "~3.2.0", - "karma-coverage": "~2.2.0", - "karma-jasmine": "~5.1.0", - "karma-jasmine-html-reporter": "~2.1.0", - "typescript": "~5.9.2" - } - }, - "node_modules/@algolia/abtesting": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@algolia/abtesting/-/abtesting-1.1.0.tgz", - "integrity": "sha512-sEyWjw28a/9iluA37KLGu8vjxEIlb60uxznfTUmXImy7H5NvbpSO6yYgmgH5KiD7j+zTUUihiST0jEP12IoXow==", - "dev": true, - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-abtesting": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/client-abtesting/-/client-abtesting-5.35.0.tgz", - "integrity": "sha512-uUdHxbfHdoppDVflCHMxRlj49/IllPwwQ2cQ8DLC4LXr3kY96AHBpW0dMyi6ygkn2MtFCc6BxXCzr668ZRhLBQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - 
"@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-analytics": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-5.35.0.tgz", - "integrity": "sha512-SunAgwa9CamLcRCPnPHx1V2uxdQwJGqb1crYrRWktWUdld0+B2KyakNEeVn5lln4VyeNtW17Ia7V7qBWyM/Skw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-common": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.35.0.tgz", - "integrity": "sha512-ipE0IuvHu/bg7TjT2s+187kz/E3h5ssfTtjpg1LbWMgxlgiaZIgTTbyynM7NfpSJSKsgQvCQxWjGUO51WSCu7w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-insights": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/client-insights/-/client-insights-5.35.0.tgz", - "integrity": "sha512-UNbCXcBpqtzUucxExwTSfAe8gknAJ485NfPN6o1ziHm6nnxx97piIbcBQ3edw823Tej2Wxu1C0xBY06KgeZ7gA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-personalization": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-5.35.0.tgz", - "integrity": "sha512-/KWjttZ6UCStt4QnWoDAJ12cKlQ+fkpMtyPmBgSS2WThJQdSV/4UWcqCUqGH7YLbwlj3JjNirCu3Y7uRTClxvA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - 
"@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-query-suggestions": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/client-query-suggestions/-/client-query-suggestions-5.35.0.tgz", - "integrity": "sha512-8oCuJCFf/71IYyvQQC+iu4kgViTODbXDk3m7yMctEncRSRV+u2RtDVlpGGfPlJQOrAY7OONwJlSHkmbbm2Kp/w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-search": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-5.35.0.tgz", - "integrity": "sha512-FfmdHTrXhIduWyyuko1YTcGLuicVbhUyRjO3HbXE4aP655yKZgdTIfMhZ/V5VY9bHuxv/fGEh3Od1Lvv2ODNTg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/ingestion": { - "version": "1.35.0", - "resolved": "https://registry.npmjs.org/@algolia/ingestion/-/ingestion-1.35.0.tgz", - "integrity": "sha512-gPzACem9IL1Co8mM1LKMhzn1aSJmp+Vp434An4C0OBY4uEJRcqsLN3uLBlY+bYvFg8C8ImwM9YRiKczJXRk0XA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/monitoring": { - "version": "1.35.0", - "resolved": "https://registry.npmjs.org/@algolia/monitoring/-/monitoring-1.35.0.tgz", - "integrity": 
"sha512-w9MGFLB6ashI8BGcQoVt7iLgDIJNCn4OIu0Q0giE3M2ItNrssvb8C0xuwJQyTy1OFZnemG0EB1OvXhIHOvQwWw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/recommend": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/recommend/-/recommend-5.35.0.tgz", - "integrity": "sha512-AhrVgaaXAb8Ue0u2nuRWwugt0dL5UmRgS9LXe0Hhz493a8KFeZVUE56RGIV3hAa6tHzmAV7eIoqcWTQvxzlJeQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/requester-browser-xhr": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.35.0.tgz", - "integrity": "sha512-diY415KLJZ6x1Kbwl9u96Jsz0OstE3asjXtJ9pmk1d+5gPuQ5jQyEsgC+WmEXzlec3iuVszm8AzNYYaqw6B+Zw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/requester-fetch": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-fetch/-/requester-fetch-5.35.0.tgz", - "integrity": "sha512-uydqnSmpAjrgo8bqhE9N1wgcB98psTRRQXcjc4izwMB7yRl9C8uuAQ/5YqRj04U0mMQ+fdu2fcNF6m9+Z1BzDQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/requester-node-http": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.35.0.tgz", - "integrity": 
"sha512-RgLX78ojYOrThJHrIiPzT4HW3yfQa0D7K+MQ81rhxqaNyNBu4F1r+72LNHYH/Z+y9I1Mrjrd/c/Ue5zfDgAEjQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@ampproject/remapping": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", - "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@angular-devkit/architect": { - "version": "0.2003.9", - "resolved": "https://registry.npmjs.org/@angular-devkit/architect/-/architect-0.2003.9.tgz", - "integrity": "sha512-p0GO2H8hiZjRHI9sm4tXTF3OpWaEnkqvB0GBGJfGp8RvpPfDA2t3j2NAUNtd75H+B0xdfyWLmNq9YJGpy6gznA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@angular-devkit/core": "20.3.9", - "rxjs": "7.8.2" - }, - "engines": { - "node": "^20.19.0 || ^22.12.0 || >=24.0.0", - "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", - "yarn": ">= 1.13.0" - } - }, - "node_modules/@angular-devkit/core": { - "version": "20.3.9", - "resolved": "https://registry.npmjs.org/@angular-devkit/core/-/core-20.3.9.tgz", - "integrity": "sha512-bXsAGIUb4p60x548YmvnMvjwd3FwWz6re1uTM7dV0XH8nQn3XMhOQ3Q3sAckzJHxkDuaRhB3K/a4kupoOmVfTQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ajv": "8.17.1", - "ajv-formats": "3.0.1", - "jsonc-parser": "3.3.1", - "picomatch": "4.0.3", - "rxjs": "7.8.2", - "source-map": "0.7.6" - }, - "engines": { - "node": "^20.19.0 || ^22.12.0 || >=24.0.0", - "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", - "yarn": ">= 1.13.0" - }, - "peerDependencies": { - "chokidar": "^4.0.0" - }, - "peerDependenciesMeta": { - "chokidar": { - "optional": true - } - } - }, - "node_modules/@angular-devkit/schematics": { - 
"version": "20.3.9", - "resolved": "https://registry.npmjs.org/@angular-devkit/schematics/-/schematics-20.3.9.tgz", - "integrity": "sha512-oaIjAKPmHMZBTC0met5M7dbXBeZnCNwmHacT/kBHNVBAz/NI95fuAfb2P0Jxt7gWdQXejDSxWp0tL+sZIyO0xw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@angular-devkit/core": "20.3.9", - "jsonc-parser": "3.3.1", - "magic-string": "0.30.17", - "ora": "8.2.0", - "rxjs": "7.8.2" - }, - "engines": { - "node": "^20.19.0 || ^22.12.0 || >=24.0.0", - "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", - "yarn": ">= 1.13.0" - } - }, - "node_modules/@angular/build": { - "version": "20.3.9", - "resolved": "https://registry.npmjs.org/@angular/build/-/build-20.3.9.tgz", - "integrity": "sha512-Ulimvg6twPSCraaZECEmENfKBlD4M1yqeHlg6dCzFNM4xcwaGUnuG6O3cIQD59DaEvaG73ceM2y8ftYdxAwFow==", - "dev": true, - "license": "MIT", - "dependencies": { - "@ampproject/remapping": "2.3.0", - "@angular-devkit/architect": "0.2003.9", - "@babel/core": "7.28.3", - "@babel/helper-annotate-as-pure": "7.27.3", - "@babel/helper-split-export-declaration": "7.24.7", - "@inquirer/confirm": "5.1.14", - "@vitejs/plugin-basic-ssl": "2.1.0", - "beasties": "0.3.5", - "browserslist": "^4.23.0", - "esbuild": "0.25.9", - "https-proxy-agent": "7.0.6", - "istanbul-lib-instrument": "6.0.3", - "jsonc-parser": "3.3.1", - "listr2": "9.0.1", - "magic-string": "0.30.17", - "mrmime": "2.0.1", - "parse5-html-rewriting-stream": "8.0.0", - "picomatch": "4.0.3", - "piscina": "5.1.3", - "rollup": "4.52.3", - "sass": "1.90.0", - "semver": "7.7.2", - "source-map-support": "0.5.21", - "tinyglobby": "0.2.14", - "vite": "7.1.11", - "watchpack": "2.4.4" - }, - "engines": { - "node": "^20.19.0 || ^22.12.0 || >=24.0.0", - "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", - "yarn": ">= 1.13.0" - }, - "optionalDependencies": { - "lmdb": "3.4.2" - }, - "peerDependencies": { - "@angular/compiler": "^20.0.0", - "@angular/compiler-cli": "^20.0.0", - "@angular/core": "^20.0.0", - "@angular/localize": "^20.0.0", - 
"@angular/platform-browser": "^20.0.0", - "@angular/platform-server": "^20.0.0", - "@angular/service-worker": "^20.0.0", - "@angular/ssr": "^20.3.9", - "karma": "^6.4.0", - "less": "^4.2.0", - "ng-packagr": "^20.0.0", - "postcss": "^8.4.0", - "tailwindcss": "^2.0.0 || ^3.0.0 || ^4.0.0", - "tslib": "^2.3.0", - "typescript": ">=5.8 <6.0", - "vitest": "^3.1.1" - }, - "peerDependenciesMeta": { - "@angular/core": { - "optional": true - }, - "@angular/localize": { - "optional": true - }, - "@angular/platform-browser": { - "optional": true - }, - "@angular/platform-server": { - "optional": true - }, - "@angular/service-worker": { - "optional": true - }, - "@angular/ssr": { - "optional": true - }, - "karma": { - "optional": true - }, - "less": { - "optional": true - }, - "ng-packagr": { - "optional": true - }, - "postcss": { - "optional": true - }, - "tailwindcss": { - "optional": true - }, - "vitest": { - "optional": true - } - } - }, - "node_modules/@angular/cli": { - "version": "20.3.9", - "resolved": "https://registry.npmjs.org/@angular/cli/-/cli-20.3.9.tgz", - "integrity": "sha512-4eKpRDg96B20yrKJqjA24zgxYy1RiRd70FvF/KG1hqSowsWwtzydtEJ3VM6iFWS9t1D8truuVpKjMEnn1Y274A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@angular-devkit/architect": "0.2003.9", - "@angular-devkit/core": "20.3.9", - "@angular-devkit/schematics": "20.3.9", - "@inquirer/prompts": "7.8.2", - "@listr2/prompt-adapter-inquirer": "3.0.1", - "@modelcontextprotocol/sdk": "1.17.3", - "@schematics/angular": "20.3.9", - "@yarnpkg/lockfile": "1.1.0", - "algoliasearch": "5.35.0", - "ini": "5.0.0", - "jsonc-parser": "3.3.1", - "listr2": "9.0.1", - "npm-package-arg": "13.0.0", - "pacote": "21.0.0", - "resolve": "1.22.10", - "semver": "7.7.2", - "yargs": "18.0.0", - "zod": "3.25.76" - }, - "bin": { - "ng": "bin/ng.js" - }, - "engines": { - "node": "^20.19.0 || ^22.12.0 || >=24.0.0", - "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", - "yarn": ">= 1.13.0" - } - }, - "node_modules/@angular/common": { - 
"version": "20.3.10", - "resolved": "https://registry.npmjs.org/@angular/common/-/common-20.3.10.tgz", - "integrity": "sha512-12fEzvKbEqjqy1fSk9DMYlJz6dF1MJVXuC5BB+oWWJpd+2lfh4xJ62pkvvLGAICI89hfM5n9Cy5kWnXwnqPZsA==", - "license": "MIT", - "peer": true, - "dependencies": { - "tslib": "^2.3.0" - }, - "engines": { - "node": "^20.19.0 || ^22.12.0 || >=24.0.0" - }, - "peerDependencies": { - "@angular/core": "20.3.10", - "rxjs": "^6.5.3 || ^7.4.0" - } - }, - "node_modules/@angular/compiler": { - "version": "20.3.10", - "resolved": "https://registry.npmjs.org/@angular/compiler/-/compiler-20.3.10.tgz", - "integrity": "sha512-cW939Lr8GZjPSYfbQKIDNrUaHWmn2M+zBbERThfq5skLuY+xM60bJFv4NqBekfX6YqKLCY62ilUZlnImYIXaqA==", - "license": "MIT", - "peer": true, - "dependencies": { - "tslib": "^2.3.0" - }, - "engines": { - "node": "^20.19.0 || ^22.12.0 || >=24.0.0" - } - }, - "node_modules/@angular/compiler-cli": { - "version": "20.3.10", - "resolved": "https://registry.npmjs.org/@angular/compiler-cli/-/compiler-cli-20.3.10.tgz", - "integrity": "sha512-9BemvpFxA26yIVdu8ROffadMkEdlk/AQQ2Jb486w7RPkrvUQ0pbEJukhv9aryJvhbMopT66S5H/j4ipOUMzmzQ==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "@babel/core": "7.28.3", - "@jridgewell/sourcemap-codec": "^1.4.14", - "chokidar": "^4.0.0", - "convert-source-map": "^1.5.1", - "reflect-metadata": "^0.2.0", - "semver": "^7.0.0", - "tslib": "^2.3.0", - "yargs": "^18.0.0" - }, - "bin": { - "ng-xi18n": "bundles/src/bin/ng_xi18n.js", - "ngc": "bundles/src/bin/ngc.js" - }, - "engines": { - "node": "^20.19.0 || ^22.12.0 || >=24.0.0" - }, - "peerDependencies": { - "@angular/compiler": "20.3.10", - "typescript": ">=5.8 <6.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@angular/core": { - "version": "20.3.10", - "resolved": "https://registry.npmjs.org/@angular/core/-/core-20.3.10.tgz", - "integrity": 
"sha512-g99Qe+NOVo72OLxowVF9NjCckswWYHmvO7MgeiZTDJbTjF9tXH96dMx7AWq76/GUinV10sNzDysVW16NoAbCRQ==", - "license": "MIT", - "peer": true, - "dependencies": { - "tslib": "^2.3.0" - }, - "engines": { - "node": "^20.19.0 || ^22.12.0 || >=24.0.0" - }, - "peerDependencies": { - "@angular/compiler": "20.3.10", - "rxjs": "^6.5.3 || ^7.4.0", - "zone.js": "~0.15.0" - }, - "peerDependenciesMeta": { - "@angular/compiler": { - "optional": true - }, - "zone.js": { - "optional": true - } - } - }, - "node_modules/@angular/elements": { - "version": "20.3.10", - "resolved": "https://registry.npmjs.org/@angular/elements/-/elements-20.3.10.tgz", - "integrity": "sha512-8xqd3v/e0oNPZFt35OdrXU61a4ughsNjjRgc+j9eD4u4KpLggTMBKW26hh2c6nAnqhZcH3eX6qLBx0wU3zN95w==", - "license": "MIT", - "dependencies": { - "tslib": "^2.3.0" - }, - "engines": { - "node": "^20.19.0 || ^22.12.0 || >=24.0.0" - }, - "peerDependencies": { - "@angular/core": "20.3.10", - "rxjs": "^6.5.3 || ^7.4.0" - } - }, - "node_modules/@angular/forms": { - "version": "20.3.10", - "resolved": "https://registry.npmjs.org/@angular/forms/-/forms-20.3.10.tgz", - "integrity": "sha512-9yWr51EUauTEINB745AaHwZNTHLpXIm4uxuykxzOg+g2QskEgVfH26uS8G2ogdNuwYpB8wnsXWr34qhM3qgOWw==", - "license": "MIT", - "dependencies": { - "tslib": "^2.3.0" - }, - "engines": { - "node": "^20.19.0 || ^22.12.0 || >=24.0.0" - }, - "peerDependencies": { - "@angular/common": "20.3.10", - "@angular/core": "20.3.10", - "@angular/platform-browser": "20.3.10", - "rxjs": "^6.5.3 || ^7.4.0" - } - }, - "node_modules/@angular/platform-browser": { - "version": "20.3.10", - "resolved": "https://registry.npmjs.org/@angular/platform-browser/-/platform-browser-20.3.10.tgz", - "integrity": "sha512-UV8CGoB5P3FmJciI3/I/n3L7C3NVgGh7bIlZ1BaB/qJDtv0Wq0rRAGwmT/Z3gwmrRtfHZWme7/CeQ2CYJmMyUQ==", - "license": "MIT", - "peer": true, - "dependencies": { - "tslib": "^2.3.0" - }, - "engines": { - "node": "^20.19.0 || ^22.12.0 || >=24.0.0" - }, - "peerDependencies": { - "@angular/animations": 
"20.3.10", - "@angular/common": "20.3.10", - "@angular/core": "20.3.10" - }, - "peerDependenciesMeta": { - "@angular/animations": { - "optional": true - } - } - }, - "node_modules/@angular/router": { - "version": "20.3.10", - "resolved": "https://registry.npmjs.org/@angular/router/-/router-20.3.10.tgz", - "integrity": "sha512-Z03cfH1jgQ7XMDJj4R8qAGqivcvhdG3wYBwaiN1K1ODBgPhbFKNeD4stKqYp7xBNtswmM2O2jMxrL/Djwju4Gg==", - "license": "MIT", - "dependencies": { - "tslib": "^2.3.0" - }, - "engines": { - "node": "^20.19.0 || ^22.12.0 || >=24.0.0" - }, - "peerDependencies": { - "@angular/common": "20.3.10", - "@angular/core": "20.3.10", - "@angular/platform-browser": "20.3.10", - "rxjs": "^6.5.3 || ^7.4.0" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", - "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-validator-identifier": "^7.27.1", - "js-tokens": "^4.0.0", - "picocolors": "^1.1.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/compat-data": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.5.tgz", - "integrity": "sha512-6uFXyCayocRbqhZOB+6XcuZbkMNimwfVGFji8CTZnCzOHVGvDqzvitu1re2AU5LROliz7eQPhB8CpAMvnx9EjA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/core": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.3.tgz", - "integrity": "sha512-yDBHV9kQNcr2/sUr9jghVyz9C3Y5G2zUM2H2lo+9mKv4sFgbA8s8Z9t8D1jiTkGoO/NoIfKMyKWr4s6CN23ZwQ==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-compilation-targets": "^7.27.2", - 
"@babel/helper-module-transforms": "^7.28.3", - "@babel/helpers": "^7.28.3", - "@babel/parser": "^7.28.3", - "@babel/template": "^7.27.2", - "@babel/traverse": "^7.28.3", - "@babel/types": "^7.28.2", - "convert-source-map": "^2.0.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.2", - "json5": "^2.2.3", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/babel" - } - }, - "node_modules/@babel/core/node_modules/convert-source-map": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", - "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", - "dev": true, - "license": "MIT" - }, - "node_modules/@babel/core/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/generator": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz", - "integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.28.5", - "@babel/types": "^7.28.5", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-annotate-as-pure": { - "version": "7.27.3", - "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", - "integrity": "sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==", - "dev": true, - "license": "MIT", 
- "dependencies": { - "@babel/types": "^7.27.3" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-compilation-targets": { - "version": "7.27.2", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", - "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/compat-data": "^7.27.2", - "@babel/helper-validator-option": "^7.27.1", - "browserslist": "^4.24.0", - "lru-cache": "^5.1.1", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-compilation-targets/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/helper-globals": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", - "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-imports": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", - "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-transforms": { - "version": "7.28.3", - "resolved": 
"https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz", - "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-imports": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1", - "@babel/traverse": "^7.28.3" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-split-export-declaration": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.7.tgz", - "integrity": "sha512-oy5V7pD+UvfkEATUKvIjvIAH/xCzfsFVw7ygW2SI6NClZzquT+mwdTfgfdbUiceh6iQO0CHtCPsyze/MZ2YbAA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-string-parser": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", - "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", - "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-option": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", - "integrity": 
"sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helpers": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", - "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/parser": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", - "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.28.5" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/template": { - "version": "7.27.2", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", - "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/parser": "^7.27.2", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/traverse": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz", - "integrity": "sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.5", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.5", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.5", - "debug": 
"^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/types": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", - "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.28.5" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@colors/colors": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", - "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.1.90" - } - }, - "node_modules/@esbuild/aix-ppc64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.9.tgz", - "integrity": "sha512-OaGtL73Jck6pBKjNIe24BnFE6agGl+6KxDtTfHhy1HmhthfKouEcOhqpSL64K4/0WCtbKFLOdzD/44cJ4k9opA==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.9.tgz", - "integrity": "sha512-5WNI1DaMtxQ7t7B6xa572XMXpHAaI/9Hnhk8lcxF4zVN4xstUgTlvuGDorBguKEnZO70qwEcLpfifMLoxiPqHQ==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.9.tgz", - "integrity": "sha512-IDrddSmpSv51ftWslJMvl3Q2ZT98fUSL2/rlUXuVqRXHCs5EUF1/f+jbjF5+NG9UffUDMCiTyh8iec7u8RlTLg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - 
"optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.9.tgz", - "integrity": "sha512-I853iMZ1hWZdNllhVZKm34f4wErd4lMyeV7BLzEExGEIZYsOzqDWDf+y082izYUE8gtJnYHdeDpN/6tUdwvfiw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.9.tgz", - "integrity": "sha512-XIpIDMAjOELi/9PB30vEbVMs3GV1v2zkkPnuyRRURbhqjyzIINwj+nbQATh4H9GxUgH1kFsEyQMxwiLFKUS6Rg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.9.tgz", - "integrity": "sha512-jhHfBzjYTA1IQu8VyrjCX4ApJDnH+ez+IYVEoJHeqJm9VhG9Dh2BYaJritkYK3vMaXrf7Ogr/0MQ8/MeIefsPQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.9.tgz", - "integrity": "sha512-z93DmbnY6fX9+KdD4Ue/H6sYs+bhFQJNCPZsi4XWJoYblUqT06MQUdBCpcSfuiN72AbqeBFu5LVQTjfXDE2A6Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.9.tgz", - "integrity": 
"sha512-mrKX6H/vOyo5v71YfXWJxLVxgy1kyt1MQaD8wZJgJfG4gq4DpQGpgTB74e5yBeQdyMTbgxp0YtNj7NuHN0PoZg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.9.tgz", - "integrity": "sha512-HBU2Xv78SMgaydBmdor38lg8YDnFKSARg1Q6AT0/y2ezUAKiZvc211RDFHlEZRFNRVhcMamiToo7bDx3VEOYQw==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.9.tgz", - "integrity": "sha512-BlB7bIcLT3G26urh5Dmse7fiLmLXnRlopw4s8DalgZ8ef79Jj4aUcYbk90g8iCa2467HX8SAIidbL7gsqXHdRw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ia32": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.9.tgz", - "integrity": "sha512-e7S3MOJPZGp2QW6AK6+Ly81rC7oOSerQ+P8L0ta4FhVi+/j/v2yZzx5CqqDaWjtPFfYz21Vi1S0auHrap3Ma3A==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-loong64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.9.tgz", - "integrity": "sha512-Sbe10Bnn0oUAB2AalYztvGcK+o6YFFA/9829PhOCUS9vkJElXGdphz0A3DbMdP8gmKkqPmPcMJmJOrI3VYB1JQ==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-mips64el": { - "version": "0.25.9", - "resolved": 
"https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.9.tgz", - "integrity": "sha512-YcM5br0mVyZw2jcQeLIkhWtKPeVfAerES5PvOzaDxVtIyZ2NUBZKNLjC5z3/fUlDgT6w89VsxP2qzNipOaaDyA==", - "cpu": [ - "mips64el" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ppc64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.9.tgz", - "integrity": "sha512-++0HQvasdo20JytyDpFvQtNrEsAgNG2CY1CLMwGXfFTKGBGQT3bOeLSYE2l1fYdvML5KUuwn9Z8L1EWe2tzs1w==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-riscv64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.9.tgz", - "integrity": "sha512-uNIBa279Y3fkjV+2cUjx36xkx7eSjb8IvnL01eXUKXez/CBHNRw5ekCGMPM0BcmqBxBcdgUWuUXmVWwm4CH9kg==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-s390x": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.9.tgz", - "integrity": "sha512-Mfiphvp3MjC/lctb+7D287Xw1DGzqJPb/J2aHHcHxflUo+8tmN/6d4k6I2yFR7BVo5/g7x2Monq4+Yew0EHRIA==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.9.tgz", - "integrity": "sha512-iSwByxzRe48YVkmpbgoxVzn76BXjlYFXC7NvLYq+b+kDjyyk30J0JY47DIn8z1MO3K0oSl9fZoRmZPQI4Hklzg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - 
} - }, - "node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.9.tgz", - "integrity": "sha512-9jNJl6FqaUG+COdQMjSCGW4QiMHH88xWbvZ+kRVblZsWrkXlABuGdFJ1E9L7HK+T0Yqd4akKNa/lO0+jDxQD4Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/netbsd-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.9.tgz", - "integrity": "sha512-RLLdkflmqRG8KanPGOU7Rpg829ZHu8nFy5Pqdi9U01VYtG9Y0zOG6Vr2z4/S+/3zIyOxiK6cCeYNWOFR9QP87g==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.9.tgz", - "integrity": "sha512-YaFBlPGeDasft5IIM+CQAhJAqS3St3nJzDEgsgFixcfZeyGPCd6eJBWzke5piZuZ7CtL656eOSYKk4Ls2C0FRQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.9.tgz", - "integrity": "sha512-1MkgTCuvMGWuqVtAvkpkXFmtL8XhWy+j4jaSO2wxfJtilVCi0ZE37b8uOdMItIHz4I6z1bWWtEX4CJwcKYLcuA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openharmony-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.9.tgz", - "integrity": "sha512-4Xd0xNiMVXKh6Fa7HEJQbrpP3m3DDn43jKxMjxLLRjWnRsfxjORYJlXPO4JNcXtOyfajXorRKY9NkOpTHptErg==", - "cpu": [ - "arm64" - ], - "dev": 
true, - "license": "MIT", - "optional": true, - "os": [ - "openharmony" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/sunos-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.9.tgz", - "integrity": "sha512-WjH4s6hzo00nNezhp3wFIAfmGZ8U7KtrJNlFMRKxiI9mxEK1scOMAaa9i4crUtu+tBr+0IN6JCuAcSBJZfnphw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.9.tgz", - "integrity": "sha512-mGFrVJHmZiRqmP8xFOc6b84/7xa5y5YvR1x8djzXpJBSv/UsNK6aqec+6JDjConTgvvQefdGhFDAs2DLAds6gQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-ia32": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.9.tgz", - "integrity": "sha512-b33gLVU2k11nVx1OhX3C8QQP6UHQK4ZtN56oFWvVXvz2VkDoe6fbG8TOgHFxEvqeqohmRnIHe5A1+HADk4OQww==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.9.tgz", - "integrity": "sha512-PPOl1mi6lpLNQxnGoyAfschAodRFYXJ+9fs6WHXz7CSWKbOqiMZsubC+BQsVKuul+3vKLuwTHsS2c2y9EoKwxQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@inquirer/ansi": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@inquirer/ansi/-/ansi-1.0.2.tgz", - "integrity": 
"sha512-S8qNSZiYzFd0wAcyG5AXCvUHC5Sr7xpZ9wZ2py9XR88jUz8wooStVx5M6dRzczbBWjic9NP7+rY0Xi7qqK/aMQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - } - }, - "node_modules/@inquirer/checkbox": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/@inquirer/checkbox/-/checkbox-4.3.1.tgz", - "integrity": "sha512-rOcLotrptYIy59SGQhKlU0xBg1vvcVl2FdPIEclUvKHh0wo12OfGkId/01PIMJ/V+EimJ77t085YabgnQHBa5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/ansi": "^1.0.2", - "@inquirer/core": "^10.3.1", - "@inquirer/figures": "^1.0.15", - "@inquirer/type": "^3.0.10", - "yoctocolors-cjs": "^2.1.3" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@inquirer/confirm": { - "version": "5.1.14", - "resolved": "https://registry.npmjs.org/@inquirer/confirm/-/confirm-5.1.14.tgz", - "integrity": "sha512-5yR4IBfe0kXe59r1YCTG8WXkUbl7Z35HK87Sw+WUyGD8wNUx7JvY7laahzeytyE1oLn74bQnL7hstctQxisQ8Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/core": "^10.1.15", - "@inquirer/type": "^3.0.8" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@inquirer/core": { - "version": "10.3.1", - "resolved": "https://registry.npmjs.org/@inquirer/core/-/core-10.3.1.tgz", - "integrity": "sha512-hzGKIkfomGFPgxKmnKEKeA+uCYBqC+TKtRx5LgyHRCrF6S2MliwRIjp3sUaWwVzMp7ZXVs8elB0Tfe682Rpg4w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/ansi": "^1.0.2", - "@inquirer/figures": "^1.0.15", - "@inquirer/type": "^3.0.10", - "cli-width": "^4.1.0", - "mute-stream": "^3.0.0", - "signal-exit": "^4.1.0", - "wrap-ansi": "^6.2.0", - "yoctocolors-cjs": "^2.1.3" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - 
"peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@inquirer/editor": { - "version": "4.2.22", - "resolved": "https://registry.npmjs.org/@inquirer/editor/-/editor-4.2.22.tgz", - "integrity": "sha512-8yYZ9TCbBKoBkzHtVNMF6PV1RJEUvMlhvmS3GxH4UvXMEHlS45jFyqFy0DU+K42jBs5slOaA78xGqqqWAx3u6A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/core": "^10.3.1", - "@inquirer/external-editor": "^1.0.3", - "@inquirer/type": "^3.0.10" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@inquirer/expand": { - "version": "4.0.22", - "resolved": "https://registry.npmjs.org/@inquirer/expand/-/expand-4.0.22.tgz", - "integrity": "sha512-9XOjCjvioLjwlq4S4yXzhvBmAXj5tG+jvva0uqedEsQ9VD8kZ+YT7ap23i0bIXOtow+di4+u3i6u26nDqEfY4Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/core": "^10.3.1", - "@inquirer/type": "^3.0.10", - "yoctocolors-cjs": "^2.1.3" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@inquirer/external-editor": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@inquirer/external-editor/-/external-editor-1.0.3.tgz", - "integrity": "sha512-RWbSrDiYmO4LbejWY7ttpxczuwQyZLBUyygsA9Nsv95hpzUWwnNTVQmAq3xuh7vNwCp07UTmE5i11XAEExx4RA==", - "dev": true, - "license": "MIT", - "dependencies": { - "chardet": "^2.1.1", - "iconv-lite": "^0.7.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@inquirer/figures": { - "version": "1.0.15", - "resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.15.tgz", - "integrity": 
"sha512-t2IEY+unGHOzAaVM5Xx6DEWKeXlDDcNPeDyUpsRc6CUhBfU3VQOEl+Vssh7VNp1dR8MdUJBWhuObjXCsVpjN5g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - } - }, - "node_modules/@inquirer/input": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/@inquirer/input/-/input-4.3.0.tgz", - "integrity": "sha512-h4fgse5zeGsBSW3cRQqu9a99OXRdRsNCvHoBqVmz40cjYjYFzcfwD0KA96BHIPlT7rZw0IpiefQIqXrjbzjS4Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/core": "^10.3.1", - "@inquirer/type": "^3.0.10" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@inquirer/number": { - "version": "3.0.22", - "resolved": "https://registry.npmjs.org/@inquirer/number/-/number-3.0.22.tgz", - "integrity": "sha512-oAdMJXz++fX58HsIEYmvuf5EdE8CfBHHXjoi9cTcQzgFoHGZE+8+Y3P38MlaRMeBvAVnkWtAxMUF6urL2zYsbg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/core": "^10.3.1", - "@inquirer/type": "^3.0.10" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@inquirer/password": { - "version": "4.0.22", - "resolved": "https://registry.npmjs.org/@inquirer/password/-/password-4.0.22.tgz", - "integrity": "sha512-CbdqK1ioIr0Y3akx03k/+Twf+KSlHjn05hBL+rmubMll7PsDTGH0R4vfFkr+XrkB0FOHrjIwVP9crt49dgt+1g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/ansi": "^1.0.2", - "@inquirer/core": "^10.3.1", - "@inquirer/type": "^3.0.10" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@inquirer/prompts": { - "version": "7.8.2", - "resolved": "https://registry.npmjs.org/@inquirer/prompts/-/prompts-7.8.2.tgz", - "integrity": 
"sha512-nqhDw2ZcAUrKNPwhjinJny903bRhI0rQhiDz1LksjeRxqa36i3l75+4iXbOy0rlDpLJGxqtgoPavQjmmyS5UJw==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "@inquirer/checkbox": "^4.2.1", - "@inquirer/confirm": "^5.1.14", - "@inquirer/editor": "^4.2.17", - "@inquirer/expand": "^4.0.17", - "@inquirer/input": "^4.2.1", - "@inquirer/number": "^3.0.17", - "@inquirer/password": "^4.0.17", - "@inquirer/rawlist": "^4.1.5", - "@inquirer/search": "^3.1.0", - "@inquirer/select": "^4.3.1" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@inquirer/rawlist": { - "version": "4.1.10", - "resolved": "https://registry.npmjs.org/@inquirer/rawlist/-/rawlist-4.1.10.tgz", - "integrity": "sha512-Du4uidsgTMkoH5izgpfyauTL/ItVHOLsVdcY+wGeoGaG56BV+/JfmyoQGniyhegrDzXpfn3D+LFHaxMDRygcAw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/core": "^10.3.1", - "@inquirer/type": "^3.0.10", - "yoctocolors-cjs": "^2.1.3" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@inquirer/search": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/@inquirer/search/-/search-3.2.1.tgz", - "integrity": "sha512-cKiuUvETublmTmaOneEermfG2tI9ABpb7fW/LqzZAnSv4ZaJnbEis05lOkiBuYX5hNdnX0Q9ryOQyrNidb55WA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/core": "^10.3.1", - "@inquirer/figures": "^1.0.15", - "@inquirer/type": "^3.0.10", - "yoctocolors-cjs": "^2.1.3" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@inquirer/select": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/@inquirer/select/-/select-4.4.1.tgz", 
- "integrity": "sha512-E9hbLU4XsNe2SAOSsFrtYtYQDVi1mfbqJrPDvXKnGlnRiApBdWMJz7r3J2Ff38AqULkPUD3XjQMD4492TymD7Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/ansi": "^1.0.2", - "@inquirer/core": "^10.3.1", - "@inquirer/figures": "^1.0.15", - "@inquirer/type": "^3.0.10", - "yoctocolors-cjs": "^2.1.3" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@inquirer/type": { - "version": "3.0.10", - "resolved": "https://registry.npmjs.org/@inquirer/type/-/type-3.0.10.tgz", - "integrity": "sha512-BvziSRxfz5Ov8ch0z/n3oijRSEcEsHnhggm4xFZe93DHcUCTlutlq9Ox4SVENAfcRD22UQq7T/atg9Wr3k09eA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@isaacs/balanced-match": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz", - "integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": "20 || >=22" - } - }, - "node_modules/@isaacs/brace-expansion": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz", - "integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@isaacs/balanced-match": "^4.0.1" - }, - "engines": { - "node": "20 || >=22" - } - }, - "node_modules/@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "dev": true, - 
"license": "ISC", - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@isaacs/cliui/node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true, - "license": "MIT" - }, - "node_modules/@isaacs/cliui/node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dev": true, - "license": "MIT", - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/@isaacs/fs-minipass": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", - "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", - "dev": true, - "license": "ISC", - "dependencies": { - 
"minipass": "^7.0.4" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@istanbuljs/schema": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", - "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.13", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", - "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.0", - "@jridgewell/trace-mapping": "^0.3.24" - } - }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", - "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", - "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", - "dev": true, - "license": "MIT" - }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.31", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", - "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/resolve-uri": "^3.1.0", - "@jridgewell/sourcemap-codec": "^1.4.14" - } - }, - "node_modules/@listr2/prompt-adapter-inquirer": { - "version": "3.0.1", 
- "resolved": "https://registry.npmjs.org/@listr2/prompt-adapter-inquirer/-/prompt-adapter-inquirer-3.0.1.tgz", - "integrity": "sha512-3XFmGwm3u6ioREG+ynAQB7FoxfajgQnMhIu8wC5eo/Lsih4aKDg0VuIMGaOsYn7hJSJagSeaD4K8yfpkEoDEmA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/type": "^3.0.7" - }, - "engines": { - "node": ">=20.0.0" - }, - "peerDependencies": { - "@inquirer/prompts": ">= 3 < 8", - "listr2": "9.0.1" - } - }, - "node_modules/@lmdb/lmdb-darwin-arm64": { - "version": "3.4.2", - "resolved": "https://registry.npmjs.org/@lmdb/lmdb-darwin-arm64/-/lmdb-darwin-arm64-3.4.2.tgz", - "integrity": "sha512-NK80WwDoODyPaSazKbzd3NEJ3ygePrkERilZshxBViBARNz21rmediktGHExoj9n5t9+ChlgLlxecdFKLCuCKg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@lmdb/lmdb-darwin-x64": { - "version": "3.4.2", - "resolved": "https://registry.npmjs.org/@lmdb/lmdb-darwin-x64/-/lmdb-darwin-x64-3.4.2.tgz", - "integrity": "sha512-zevaowQNmrp3U7Fz1s9pls5aIgpKRsKb3dZWDINtLiozh3jZI9fBrI19lYYBxqdyiIyNdlyiidPnwPShj4aK+w==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@lmdb/lmdb-linux-arm": { - "version": "3.4.2", - "resolved": "https://registry.npmjs.org/@lmdb/lmdb-linux-arm/-/lmdb-linux-arm-3.4.2.tgz", - "integrity": "sha512-OmHCULY17rkx/RoCoXlzU7LyR8xqrksgdYWwtYa14l/sseezZ8seKWXcogHcjulBddER5NnEFV4L/Jtr2nyxeg==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@lmdb/lmdb-linux-arm64": { - "version": "3.4.2", - "resolved": "https://registry.npmjs.org/@lmdb/lmdb-linux-arm64/-/lmdb-linux-arm64-3.4.2.tgz", - "integrity": "sha512-ZBEfbNZdkneebvZs98Lq30jMY8V9IJzckVeigGivV7nTHJc+89Ctomp1kAIWKlwIG0ovCDrFI448GzFPORANYg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - 
"node_modules/@lmdb/lmdb-linux-x64": { - "version": "3.4.2", - "resolved": "https://registry.npmjs.org/@lmdb/lmdb-linux-x64/-/lmdb-linux-x64-3.4.2.tgz", - "integrity": "sha512-vL9nM17C77lohPYE4YaAQvfZCSVJSryE4fXdi8M7uWPBnU+9DJabgKVAeyDb84ZM2vcFseoBE4/AagVtJeRE7g==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@lmdb/lmdb-win32-arm64": { - "version": "3.4.2", - "resolved": "https://registry.npmjs.org/@lmdb/lmdb-win32-arm64/-/lmdb-win32-arm64-3.4.2.tgz", - "integrity": "sha512-SXWjdBfNDze4ZPeLtYIzsIeDJDJ/SdsA0pEXcUBayUIMO0FQBHfVZZyHXQjjHr4cvOAzANBgIiqaXRwfMhzmLw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@lmdb/lmdb-win32-x64": { - "version": "3.4.2", - "resolved": "https://registry.npmjs.org/@lmdb/lmdb-win32-x64/-/lmdb-win32-x64-3.4.2.tgz", - "integrity": "sha512-IY+r3bxKW6Q6sIPiMC0L533DEfRJSXibjSI3Ft/w9Q8KQBNqEIvUFXt+09wV8S5BRk0a8uSF19YWxuRwEfI90g==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@modelcontextprotocol/sdk": { - "version": "1.17.3", - "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.17.3.tgz", - "integrity": "sha512-JPwUKWSsbzx+DLFznf/QZ32Qa+ptfbUlHhRLrBQBAFu9iI1iYvizM4p+zhhRDceSsPutXp4z+R/HPVphlIiclg==", - "dev": true, - "license": "MIT", - "dependencies": { - "ajv": "^6.12.6", - "content-type": "^1.0.5", - "cors": "^2.8.5", - "cross-spawn": "^7.0.5", - "eventsource": "^3.0.2", - "eventsource-parser": "^3.0.0", - "express": "^5.0.1", - "express-rate-limit": "^7.5.0", - "pkce-challenge": "^5.0.0", - "raw-body": "^3.0.0", - "zod": "^3.23.8", - "zod-to-json-schema": "^3.24.1" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": 
"sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "license": "MIT", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true, - "license": "MIT" - }, - "node_modules/@msgpackr-extract/msgpackr-extract-darwin-arm64": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-arm64/-/msgpackr-extract-darwin-arm64-3.0.3.tgz", - "integrity": "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@msgpackr-extract/msgpackr-extract-darwin-x64": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-x64/-/msgpackr-extract-darwin-x64-3.0.3.tgz", - "integrity": "sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@msgpackr-extract/msgpackr-extract-linux-arm": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm/-/msgpackr-extract-linux-arm-3.0.3.tgz", - "integrity": "sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==", - "cpu": [ - "arm" - ], - "dev": true, - 
"license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@msgpackr-extract/msgpackr-extract-linux-arm64": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm64/-/msgpackr-extract-linux-arm64-3.0.3.tgz", - "integrity": "sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@msgpackr-extract/msgpackr-extract-linux-x64": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-x64/-/msgpackr-extract-linux-x64-3.0.3.tgz", - "integrity": "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@msgpackr-extract/msgpackr-extract-win32-x64": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-win32-x64/-/msgpackr-extract-win32-x64-3.0.3.tgz", - "integrity": "sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@napi-rs/nice": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@napi-rs/nice/-/nice-1.1.1.tgz", - "integrity": "sha512-xJIPs+bYuc9ASBl+cvGsKbGrJmS6fAKaSZCnT0lhahT5rhA2VVy9/EcIgd2JhtEuFOJNx7UHNn/qiTPTY4nrQw==", - "dev": true, - "license": "MIT", - "optional": true, - "engines": { - "node": ">= 10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/Brooooooklyn" - }, - "optionalDependencies": { - "@napi-rs/nice-android-arm-eabi": "1.1.1", - "@napi-rs/nice-android-arm64": "1.1.1", - "@napi-rs/nice-darwin-arm64": "1.1.1", - 
"@napi-rs/nice-darwin-x64": "1.1.1", - "@napi-rs/nice-freebsd-x64": "1.1.1", - "@napi-rs/nice-linux-arm-gnueabihf": "1.1.1", - "@napi-rs/nice-linux-arm64-gnu": "1.1.1", - "@napi-rs/nice-linux-arm64-musl": "1.1.1", - "@napi-rs/nice-linux-ppc64-gnu": "1.1.1", - "@napi-rs/nice-linux-riscv64-gnu": "1.1.1", - "@napi-rs/nice-linux-s390x-gnu": "1.1.1", - "@napi-rs/nice-linux-x64-gnu": "1.1.1", - "@napi-rs/nice-linux-x64-musl": "1.1.1", - "@napi-rs/nice-openharmony-arm64": "1.1.1", - "@napi-rs/nice-win32-arm64-msvc": "1.1.1", - "@napi-rs/nice-win32-ia32-msvc": "1.1.1", - "@napi-rs/nice-win32-x64-msvc": "1.1.1" - } - }, - "node_modules/@napi-rs/nice-android-arm-eabi": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@napi-rs/nice-android-arm-eabi/-/nice-android-arm-eabi-1.1.1.tgz", - "integrity": "sha512-kjirL3N6TnRPv5iuHw36wnucNqXAO46dzK9oPb0wj076R5Xm8PfUVA9nAFB5ZNMmfJQJVKACAPd/Z2KYMppthw==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/nice-android-arm64": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@napi-rs/nice-android-arm64/-/nice-android-arm64-1.1.1.tgz", - "integrity": "sha512-blG0i7dXgbInN5urONoUCNf+DUEAavRffrO7fZSeoRMJc5qD+BJeNcpr54msPF6qfDD6kzs9AQJogZvT2KD5nw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/nice-darwin-arm64": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@napi-rs/nice-darwin-arm64/-/nice-darwin-arm64-1.1.1.tgz", - "integrity": "sha512-s/E7w45NaLqTGuOjC2p96pct4jRfo61xb9bU1unM/MJ/RFkKlJyJDx7OJI/O0ll/hrfpqKopuAFDV8yo0hfT7A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/nice-darwin-x64": { - "version": "1.1.1", 
- "resolved": "https://registry.npmjs.org/@napi-rs/nice-darwin-x64/-/nice-darwin-x64-1.1.1.tgz", - "integrity": "sha512-dGoEBnVpsdcC+oHHmW1LRK5eiyzLwdgNQq3BmZIav+9/5WTZwBYX7r5ZkQC07Nxd3KHOCkgbHSh4wPkH1N1LiQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/nice-freebsd-x64": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@napi-rs/nice-freebsd-x64/-/nice-freebsd-x64-1.1.1.tgz", - "integrity": "sha512-kHv4kEHAylMYmlNwcQcDtXjklYp4FCf0b05E+0h6nDHsZ+F0bDe04U/tXNOqrx5CmIAth4vwfkjjUmp4c4JktQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/nice-linux-arm-gnueabihf": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-arm-gnueabihf/-/nice-linux-arm-gnueabihf-1.1.1.tgz", - "integrity": "sha512-E1t7K0efyKXZDoZg1LzCOLxgolxV58HCkaEkEvIYQx12ht2pa8hoBo+4OB3qh7e+QiBlp1SRf+voWUZFxyhyqg==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/nice-linux-arm64-gnu": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-arm64-gnu/-/nice-linux-arm64-gnu-1.1.1.tgz", - "integrity": "sha512-CIKLA12DTIZlmTaaKhQP88R3Xao+gyJxNWEn04wZwC2wmRapNnxCUZkVwggInMJvtVElA+D4ZzOU5sX4jV+SmQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/nice-linux-arm64-musl": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-arm64-musl/-/nice-linux-arm64-musl-1.1.1.tgz", - "integrity": "sha512-+2Rzdb3nTIYZ0YJF43qf2twhqOCkiSrHx2Pg6DJaCPYhhaxbLcdlV8hCRMHghQ+EtZQWGNcS2xF4KxBhSGeutg==", - "cpu": [ - "arm64" - 
], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/nice-linux-ppc64-gnu": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-ppc64-gnu/-/nice-linux-ppc64-gnu-1.1.1.tgz", - "integrity": "sha512-4FS8oc0GeHpwvv4tKciKkw3Y4jKsL7FRhaOeiPei0X9T4Jd619wHNe4xCLmN2EMgZoeGg+Q7GY7BsvwKpL22Tg==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/nice-linux-riscv64-gnu": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-riscv64-gnu/-/nice-linux-riscv64-gnu-1.1.1.tgz", - "integrity": "sha512-HU0nw9uD4FO/oGCCk409tCi5IzIZpH2agE6nN4fqpwVlCn5BOq0MS1dXGjXaG17JaAvrlpV5ZeyZwSon10XOXw==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/nice-linux-s390x-gnu": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-s390x-gnu/-/nice-linux-s390x-gnu-1.1.1.tgz", - "integrity": "sha512-2YqKJWWl24EwrX0DzCQgPLKQBxYDdBxOHot1KWEq7aY2uYeX+Uvtv4I8xFVVygJDgf6/92h9N3Y43WPx8+PAgQ==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/nice-linux-x64-gnu": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@napi-rs/nice-linux-x64-gnu/-/nice-linux-x64-gnu-1.1.1.tgz", - "integrity": "sha512-/gaNz3R92t+dcrfCw/96pDopcmec7oCcAQ3l/M+Zxr82KT4DljD37CpgrnXV+pJC263JkW572pdbP3hP+KjcIg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/nice-linux-x64-musl": { - "version": "1.1.1", - "resolved": 
"https://registry.npmjs.org/@napi-rs/nice-linux-x64-musl/-/nice-linux-x64-musl-1.1.1.tgz", - "integrity": "sha512-xScCGnyj/oppsNPMnevsBe3pvNaoK7FGvMjT35riz9YdhB2WtTG47ZlbxtOLpjeO9SqqQ2J2igCmz6IJOD5JYw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/nice-openharmony-arm64": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@napi-rs/nice-openharmony-arm64/-/nice-openharmony-arm64-1.1.1.tgz", - "integrity": "sha512-6uJPRVwVCLDeoOaNyeiW0gp2kFIM4r7PL2MczdZQHkFi9gVlgm+Vn+V6nTWRcu856mJ2WjYJiumEajfSm7arPQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openharmony" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/nice-win32-arm64-msvc": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@napi-rs/nice-win32-arm64-msvc/-/nice-win32-arm64-msvc-1.1.1.tgz", - "integrity": "sha512-uoTb4eAvM5B2aj/z8j+Nv8OttPf2m+HVx3UjA5jcFxASvNhQriyCQF1OB1lHL43ZhW+VwZlgvjmP5qF3+59atA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/nice-win32-ia32-msvc": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@napi-rs/nice-win32-ia32-msvc/-/nice-win32-ia32-msvc-1.1.1.tgz", - "integrity": "sha512-CNQqlQT9MwuCsg1Vd/oKXiuH+TcsSPJmlAFc5frFyX/KkOh0UpBLEj7aoY656d5UKZQMQFP7vJNa1DNUNORvug==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/nice-win32-x64-msvc": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@napi-rs/nice-win32-x64-msvc/-/nice-win32-x64-msvc-1.1.1.tgz", - "integrity": "sha512-vB+4G/jBQCAh0jelMTY3+kgFy00Hlx2f2/1zjMoH821IbplbWZOkLiTYXQkygNTzQJTq5cvwBDgn2ppHD+bglQ==", - "cpu": [ - "x64" - 
], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@npmcli/agent": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-3.0.0.tgz", - "integrity": "sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q==", - "dev": true, - "license": "ISC", - "dependencies": { - "agent-base": "^7.1.0", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.1", - "lru-cache": "^10.0.1", - "socks-proxy-agent": "^8.0.3" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@npmcli/agent/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/@npmcli/fs": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-4.0.0.tgz", - "integrity": "sha512-/xGlezI6xfGO9NwuJlnwz/K14qD1kCSAGtacBHnGzeAIuJGazcp45KP5NuyARXoKb7cwulAGWVsbeSxdG/cb0Q==", - "dev": true, - "license": "ISC", - "dependencies": { - "semver": "^7.3.5" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@npmcli/git": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-6.0.3.tgz", - "integrity": "sha512-GUYESQlxZRAdhs3UhbB6pVRNUELQOHXwK9ruDkwmCv2aZ5y0SApQzUJCg02p3A7Ue2J5hxvlk1YI53c00NmRyQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/promise-spawn": "^8.0.0", - "ini": "^5.0.0", - "lru-cache": "^10.0.1", - "npm-pick-manifest": "^10.0.0", - "proc-log": "^5.0.0", - "promise-retry": "^2.0.1", - "semver": "^7.3.5", - "which": "^5.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@npmcli/git/node_modules/isexe": { - "version": "3.1.1", - "resolved": 
"https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", - "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16" - } - }, - "node_modules/@npmcli/git/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/@npmcli/git/node_modules/which": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz", - "integrity": "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^3.1.1" - }, - "bin": { - "node-which": "bin/which.js" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@npmcli/installed-package-contents": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-3.0.0.tgz", - "integrity": "sha512-fkxoPuFGvxyrH+OQzyTkX2LUEamrF4jZSmxjAtPPHHGO0dqsQ8tTKjnIS8SAnPHdk2I03BDtSMR5K/4loKg79Q==", - "dev": true, - "license": "ISC", - "dependencies": { - "npm-bundled": "^4.0.0", - "npm-normalize-package-bin": "^4.0.0" - }, - "bin": { - "installed-package-contents": "bin/index.js" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@npmcli/node-gyp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-4.0.0.tgz", - "integrity": "sha512-+t5DZ6mO/QFh78PByMq1fGSAub/agLJZDRfJRMeOSNCt8s9YVlTjmGpIPwPhvXTGUIJk+WszlT0rQa1W33yzNA==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@npmcli/package-json": { - "version": "6.2.0", - "resolved": 
"https://registry.npmjs.org/@npmcli/package-json/-/package-json-6.2.0.tgz", - "integrity": "sha512-rCNLSB/JzNvot0SEyXqWZ7tX2B5dD2a1br2Dp0vSYVo5jh8Z0EZ7lS9TsZ1UtziddB1UfNUaMCc538/HztnJGA==", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/git": "^6.0.0", - "glob": "^10.2.2", - "hosted-git-info": "^8.0.0", - "json-parse-even-better-errors": "^4.0.0", - "proc-log": "^5.0.0", - "semver": "^7.5.3", - "validate-npm-package-license": "^3.0.4" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@npmcli/package-json/node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/@npmcli/package-json/node_modules/glob": { - "version": "10.4.5", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", - "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", - "dev": true, - "license": "ISC", - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@npmcli/package-json/node_modules/hosted-git-info": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz", - "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==", - "dev": true, - "license": "ISC", - "dependencies": { - "lru-cache": "^10.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - 
"node_modules/@npmcli/package-json/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/@npmcli/package-json/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@npmcli/promise-spawn": { - "version": "8.0.3", - "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-8.0.3.tgz", - "integrity": "sha512-Yb00SWaL4F8w+K8YGhQ55+xE4RUNdMHV43WZGsiTM92gS+lC0mGsn7I4hLug7pbao035S6bj3Y3w0cUNGLfmkg==", - "dev": true, - "license": "ISC", - "dependencies": { - "which": "^5.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@npmcli/promise-spawn/node_modules/isexe": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", - "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16" - } - }, - "node_modules/@npmcli/promise-spawn/node_modules/which": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz", - "integrity": "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^3.1.1" - }, - "bin": { - "node-which": "bin/which.js" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, 
- "node_modules/@npmcli/redact": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/@npmcli/redact/-/redact-3.2.2.tgz", - "integrity": "sha512-7VmYAmk4csGv08QzrDKScdzn11jHPFGyqJW39FyPgPuAp3zIaUmuCo1yxw9aGs+NEJuTGQ9Gwqpt93vtJubucg==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@npmcli/run-script": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-9.1.0.tgz", - "integrity": "sha512-aoNSbxtkePXUlbZB+anS1LqsJdctG5n3UVhfU47+CDdwMi6uNTBMF9gPcQRnqghQd2FGzcwwIFBruFMxjhBewg==", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/node-gyp": "^4.0.0", - "@npmcli/package-json": "^6.0.0", - "@npmcli/promise-spawn": "^8.0.0", - "node-gyp": "^11.0.0", - "proc-log": "^5.0.0", - "which": "^5.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@npmcli/run-script/node_modules/isexe": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", - "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16" - } - }, - "node_modules/@npmcli/run-script/node_modules/which": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz", - "integrity": "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^3.1.1" - }, - "bin": { - "node-which": "bin/which.js" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@parcel/watcher": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.5.1.tgz", - "integrity": "sha512-dfUnCxiN9H4ap84DvD2ubjw+3vUNpstxa0TneY/Paat8a3R4uQZDLSvWjmznAY/DoahqTHl9V46HF/Zs3F29pg==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - 
"optional": true, - "dependencies": { - "detect-libc": "^1.0.3", - "is-glob": "^4.0.3", - "micromatch": "^4.0.5", - "node-addon-api": "^7.0.0" - }, - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - }, - "optionalDependencies": { - "@parcel/watcher-android-arm64": "2.5.1", - "@parcel/watcher-darwin-arm64": "2.5.1", - "@parcel/watcher-darwin-x64": "2.5.1", - "@parcel/watcher-freebsd-x64": "2.5.1", - "@parcel/watcher-linux-arm-glibc": "2.5.1", - "@parcel/watcher-linux-arm-musl": "2.5.1", - "@parcel/watcher-linux-arm64-glibc": "2.5.1", - "@parcel/watcher-linux-arm64-musl": "2.5.1", - "@parcel/watcher-linux-x64-glibc": "2.5.1", - "@parcel/watcher-linux-x64-musl": "2.5.1", - "@parcel/watcher-win32-arm64": "2.5.1", - "@parcel/watcher-win32-ia32": "2.5.1", - "@parcel/watcher-win32-x64": "2.5.1" - } - }, - "node_modules/@parcel/watcher-android-arm64": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-android-arm64/-/watcher-android-arm64-2.5.1.tgz", - "integrity": "sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-darwin-arm64": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-arm64/-/watcher-darwin-arm64-2.5.1.tgz", - "integrity": "sha512-eAzPv5osDmZyBhou8PoF4i6RQXAfeKL9tjb3QzYuccXFMQU0ruIc/POh30ePnaOyD1UXdlKguHBmsTs53tVoPw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - 
"node_modules/@parcel/watcher-darwin-x64": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-x64/-/watcher-darwin-x64-2.5.1.tgz", - "integrity": "sha512-1ZXDthrnNmwv10A0/3AJNZ9JGlzrF82i3gNQcWOzd7nJ8aj+ILyW1MTxVk35Db0u91oD5Nlk9MBiujMlwmeXZg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-freebsd-x64": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-freebsd-x64/-/watcher-freebsd-x64-2.5.1.tgz", - "integrity": "sha512-SI4eljM7Flp9yPuKi8W0ird8TI/JK6CSxju3NojVI6BjHsTyK7zxA9urjVjEKJ5MBYC+bLmMcbAWlZ+rFkLpJQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-linux-arm-glibc": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-glibc/-/watcher-linux-arm-glibc-2.5.1.tgz", - "integrity": "sha512-RCdZlEyTs8geyBkkcnPWvtXLY44BCeZKmGYRtSgtwwnHR4dxfHRG3gR99XdMEdQ7KeiDdasJwwvNSF5jKtDwdA==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-linux-arm-musl": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-musl/-/watcher-linux-arm-musl-2.5.1.tgz", - "integrity": "sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" 
- ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-linux-arm64-glibc": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-glibc/-/watcher-linux-arm64-glibc-2.5.1.tgz", - "integrity": "sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-linux-arm64-musl": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-musl/-/watcher-linux-arm64-musl-2.5.1.tgz", - "integrity": "sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-linux-x64-glibc": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-glibc/-/watcher-linux-x64-glibc-2.5.1.tgz", - "integrity": "sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-linux-x64-musl": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-musl/-/watcher-linux-x64-musl-2.5.1.tgz", - "integrity": 
"sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-win32-arm64": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-arm64/-/watcher-win32-arm64-2.5.1.tgz", - "integrity": "sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-win32-ia32": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-ia32/-/watcher-win32-ia32-2.5.1.tgz", - "integrity": "sha512-c2KkcVN+NJmuA7CGlaGD1qJh1cLfDnQsHjE89E60vUEMlqduHGCdCLJCID5geFVM0dOtA3ZiIO8BoEQmzQVfpQ==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher-win32-x64": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-x64/-/watcher-win32-x64-2.5.1.tgz", - "integrity": "sha512-9lHBdJITeNR++EvSQVUcaZoWupyHfXe1jZvGZ06O/5MflPcuPLtEphScIBL+AiCWBO46tDSHzWyD0uDmmZqsgA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/@parcel/watcher/node_modules/detect-libc": { - 
"version": "1.0.3", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", - "integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==", - "dev": true, - "license": "Apache-2.0", - "optional": true, - "bin": { - "detect-libc": "bin/detect-libc.js" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/@parcel/watcher/node_modules/node-addon-api": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz", - "integrity": "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==", - "dev": true, - "license": "MIT", - "optional": true - }, - "node_modules/@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", - "dev": true, - "license": "MIT", - "optional": true, - "engines": { - "node": ">=14" - } - }, - "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.52.3.tgz", - "integrity": "sha512-h6cqHGZ6VdnwliFG1NXvMPTy/9PS3h8oLh7ImwR+kl+oYnQizgjxsONmmPSb2C66RksfkfIxEVtDSEcJiO0tqw==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/@rollup/rollup-android-arm64": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.52.3.tgz", - "integrity": "sha512-wd+u7SLT/u6knklV/ifG7gr5Qy4GUbH2hMWcDauPFJzmCZUAJ8L2bTkVXC2niOIxp8lk3iH/QX8kSrUxVZrOVw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.52.3", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.52.3.tgz", - "integrity": "sha512-lj9ViATR1SsqycwFkJCtYfQTheBdvlWJqzqxwc9f2qrcVrQaF/gCuBRTiTolkRWS6KvNxSk4KHZWG7tDktLgjg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.52.3.tgz", - "integrity": "sha512-+Dyo7O1KUmIsbzx1l+4V4tvEVnVQqMOIYtrxK7ncLSknl1xnMHLgn7gddJVrYPNZfEB8CIi3hK8gq8bDhb3h5A==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.52.3.tgz", - "integrity": "sha512-u9Xg2FavYbD30g3DSfNhxgNrxhi6xVG4Y6i9Ur1C7xUuGDW3banRbXj+qgnIrwRN4KeJ396jchwy9bCIzbyBEQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ] - }, - "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.52.3.tgz", - "integrity": "sha512-5M8kyi/OX96wtD5qJR89a/3x5x8x5inXBZO04JWhkQb2JWavOWfjgkdvUqibGJeNNaz1/Z1PPza5/tAPXICI6A==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ] - }, - "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.52.3.tgz", - "integrity": "sha512-IoerZJ4l1wRMopEHRKOO16e04iXRDyZFZnNZKrWeNquh5d6bucjezgd+OxG03mOMTnS1x7hilzb3uURPkJ0OfA==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": 
"4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.52.3.tgz", - "integrity": "sha512-ZYdtqgHTDfvrJHSh3W22TvjWxwOgc3ThK/XjgcNGP2DIwFIPeAPNsQxrJO5XqleSlgDux2VAoWQ5iJrtaC1TbA==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.52.3.tgz", - "integrity": "sha512-NcViG7A0YtuFDA6xWSgmFb6iPFzHlf5vcqb2p0lGEbT+gjrEEz8nC/EeDHvx6mnGXnGCC1SeVV+8u+smj0CeGQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.52.3.tgz", - "integrity": "sha512-d3pY7LWno6SYNXRm6Ebsq0DJGoiLXTb83AIPCXl9fmtIQs/rXoS8SJxxUNtFbJ5MiOvs+7y34np77+9l4nfFMw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-loong64-gnu": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.52.3.tgz", - "integrity": "sha512-3y5GA0JkBuirLqmjwAKwB0keDlI6JfGYduMlJD/Rl7fvb4Ni8iKdQs1eiunMZJhwDWdCvrcqXRY++VEBbvk6Eg==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-ppc64-gnu": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.52.3.tgz", - "integrity": "sha512-AUUH65a0p3Q0Yfm5oD2KVgzTKgwPyp9DSXc3UA7DtxhEb/WSPfbG4wqXeSN62OG5gSo18em4xv6dbfcUGXcagw==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, 
- "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.52.3.tgz", - "integrity": "sha512-1makPhFFVBqZE+XFg3Dkq+IkQ7JvmUrwwqaYBL2CE+ZpxPaqkGaiWFEWVGyvTwZace6WLJHwjVh/+CXbKDGPmg==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.52.3.tgz", - "integrity": "sha512-OOFJa28dxfl8kLOPMUOQBCO6z3X2SAfzIE276fwT52uXDWUS178KWq0pL7d6p1kz7pkzA0yQwtqL0dEPoVcRWg==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.52.3.tgz", - "integrity": "sha512-jMdsML2VI5l+V7cKfZx3ak+SLlJ8fKvLJ0Eoa4b9/vCUrzXKgoKxvHqvJ/mkWhFiyp88nCkM5S2v6nIwRtPcgg==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.52.3.tgz", - "integrity": "sha512-tPgGd6bY2M2LJTA1uGq8fkSPK8ZLYjDjY+ZLK9WHncCnfIz29LIXIqUgzCR0hIefzy6Hpbe8Th5WOSwTM8E7LA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.52.3.tgz", - "integrity": "sha512-BCFkJjgk+WFzP+tcSMXq77ymAPIxsX9lFJWs+2JzuZTLtksJ2o5hvgTdIcZ5+oKzUDMwI0PfWzRBYAydAHF2Mw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": 
"MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-openharmony-arm64": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.52.3.tgz", - "integrity": "sha512-KTD/EqjZF3yvRaWUJdD1cW+IQBk4fbQaHYJUmP8N4XoKFZilVL8cobFSTDnjTtxWJQ3JYaMgF4nObY/+nYkumA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openharmony" - ] - }, - "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.52.3.tgz", - "integrity": "sha512-+zteHZdoUYLkyYKObGHieibUFLbttX2r+58l27XZauq0tcWYYuKUwY2wjeCN9oK1Um2YgH2ibd6cnX/wFD7DuA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.52.3.tgz", - "integrity": "sha512-of1iHkTQSo3kr6dTIRX6t81uj/c/b15HXVsPcEElN5sS859qHrOepM5p9G41Hah+CTqSh2r8Bm56dL2z9UQQ7g==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@rollup/rollup-win32-x64-gnu": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.52.3.tgz", - "integrity": "sha512-s0hybmlHb56mWVZQj8ra9048/WZTPLILKxcvcq+8awSZmyiSUZjjem1AhU3Tf4ZKpYhK4mg36HtHDOe8QJS5PQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.52.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.52.3.tgz", - "integrity": "sha512-zGIbEVVXVtauFgl3MRwGWEN36P5ZGenHRMgNw88X5wEhEBpq0XrMEZwOn07+ICrwM17XO5xfMZqh0OldCH5VTA==", - 
"cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@schematics/angular": { - "version": "20.3.9", - "resolved": "https://registry.npmjs.org/@schematics/angular/-/angular-20.3.9.tgz", - "integrity": "sha512-XkgTwGhhrx+MVi2+TFO32d6Es5Uezzx7Y7B/e2ulDlj08bizxQj+9wkeLt5+bR8JWODHpEntZn/Xd5WvXnODGA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@angular-devkit/core": "20.3.9", - "@angular-devkit/schematics": "20.3.9", - "jsonc-parser": "3.3.1" - }, - "engines": { - "node": "^20.19.0 || ^22.12.0 || >=24.0.0", - "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", - "yarn": ">= 1.13.0" - } - }, - "node_modules/@sigstore/bundle": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-3.1.0.tgz", - "integrity": "sha512-Mm1E3/CmDDCz3nDhFKTuYdB47EdRFRQMOE/EAbiG1MJW77/w1b3P7Qx7JSrVJs8PfwOLOVcKQCHErIwCTyPbag==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@sigstore/protobuf-specs": "^0.4.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@sigstore/core": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-2.0.0.tgz", - "integrity": "sha512-nYxaSb/MtlSI+JWcwTHQxyNmWeWrUXJJ/G4liLrGG7+tS4vAz6LF3xRXqLH6wPIVUoZQel2Fs4ddLx4NCpiIYg==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@sigstore/protobuf-specs": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.4.3.tgz", - "integrity": "sha512-fk2zjD9117RL9BjqEwF7fwv7Q/P9yGsMV4MUJZ/DocaQJ6+3pKr+syBq1owU5Q5qGw5CUbXzm+4yJ2JVRDQeSA==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@sigstore/sign": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-3.1.0.tgz", - "integrity": 
"sha512-knzjmaOHOov1Ur7N/z4B1oPqZ0QX5geUfhrVaqVlu+hl0EAoL4o+l0MSULINcD5GCWe3Z0+YJO8ues6vFlW0Yw==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@sigstore/bundle": "^3.1.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.4.0", - "make-fetch-happen": "^14.0.2", - "proc-log": "^5.0.0", - "promise-retry": "^2.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@sigstore/tuf": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-3.1.1.tgz", - "integrity": "sha512-eFFvlcBIoGwVkkwmTi/vEQFSva3xs5Ot3WmBcjgjVdiaoelBLQaQ/ZBfhlG0MnG0cmTYScPpk7eDdGDWUcFUmg==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@sigstore/protobuf-specs": "^0.4.1", - "tuf-js": "^3.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@sigstore/verify": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-2.1.1.tgz", - "integrity": "sha512-hVJD77oT67aowHxwT4+M6PGOp+E2LtLdTK3+FC0lBO9T7sYwItDMXZ7Z07IDCvR1M717a4axbIWckrW67KMP/w==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@sigstore/bundle": "^3.1.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.4.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@socket.io/component-emitter": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@socket.io/component-emitter/-/component-emitter-3.1.2.tgz", - "integrity": "sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA==", - "dev": true, - "license": "MIT" - }, - "node_modules/@tufjs/canonical-json": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@tufjs/canonical-json/-/canonical-json-2.0.0.tgz", - "integrity": "sha512-yVtV8zsdo8qFHe+/3kw81dSLyF7D576A5cCFCi4X7B39tWT7SekaEFUnvnWJHz+9qO7qJTah1JbrDjWKqFtdWA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^16.14.0 || >=18.0.0" 
- } - }, - "node_modules/@tufjs/models": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-3.0.1.tgz", - "integrity": "sha512-UUYHISyhCU3ZgN8yaear3cGATHb3SMuKHsQ/nVbHXcmnBf+LzQ/cQfhNG+rfaSHgqGKNEm2cOCLVLELStUQ1JA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@tufjs/canonical-json": "2.0.0", - "minimatch": "^9.0.5" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@tufjs/models/node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/@tufjs/models/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@types/cors": { - "version": "2.8.19", - "resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.19.tgz", - "integrity": "sha512-mFNylyeyqN93lfe/9CSxOGREz8cpzAhH+E93xJ4xWQf62V8sQ/24reV2nyzUWM6H6Xji+GGHpkbLe7pVoUEskg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/estree": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", - "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/jasmine": { - "version": "5.1.12", - "resolved": 
"https://registry.npmjs.org/@types/jasmine/-/jasmine-5.1.12.tgz", - "integrity": "sha512-1BzPxNsFDLDfj9InVR3IeY0ZVf4o9XV+4mDqoCfyPkbsA7dYyKAPAb2co6wLFlHcvxPlt1wShm7zQdV7uTfLGA==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/node": { - "version": "24.10.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.0.tgz", - "integrity": "sha512-qzQZRBqkFsYyaSWXuEHc2WR9c0a0CXwiE5FWUvn7ZM+vdy1uZLfCunD38UzhuB7YN/J11ndbDBcTmOdxJo9Q7A==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "undici-types": "~7.16.0" - } - }, - "node_modules/@vitejs/plugin-basic-ssl": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-basic-ssl/-/plugin-basic-ssl-2.1.0.tgz", - "integrity": "sha512-dOxxrhgyDIEUADhb/8OlV9JIqYLgos03YorAueTIeOUskLJSEsfwCByjbu98ctXitUN3znXKp0bYD/WHSudCeA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" - }, - "peerDependencies": { - "vite": "^6.0.0 || ^7.0.0" - } - }, - "node_modules/@yarnpkg/lockfile": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz", - "integrity": "sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ==", - "dev": true, - "license": "BSD-2-Clause" - }, - "node_modules/abbrev": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-3.0.1.tgz", - "integrity": "sha512-AO2ac6pjRB3SJmGJo+v5/aK6Omggp6fsLrs6wN9bd35ulu4cCwaAU9+7ZhXjeqHVkaHThLuzH0nZr0YpCDhygg==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/accepts": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", - "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", - "dev": true, - "license": "MIT", - "dependencies": { - "mime-types": "^3.0.0", - "negotiator": "^1.0.0" - }, - 
"engines": { - "node": ">= 0.6" - } - }, - "node_modules/agent-base": { - "version": "7.1.4", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz", - "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 14" - } - }, - "node_modules/ajv": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", - "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", - "dev": true, - "license": "MIT", - "dependencies": { - "fast-deep-equal": "^3.1.3", - "fast-uri": "^3.0.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/ajv-formats": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-3.0.1.tgz", - "integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ajv": "^8.0.0" - }, - "peerDependencies": { - "ajv": "^8.0.0" - }, - "peerDependenciesMeta": { - "ajv": { - "optional": true - } - } - }, - "node_modules/algoliasearch": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-5.35.0.tgz", - "integrity": "sha512-Y+moNhsqgLmvJdgTsO4GZNgsaDWv8AOGAaPeIeHKlDn/XunoAqYbA+XNpBd1dW8GOXAUDyxC9Rxc7AV4kpFcIg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@algolia/abtesting": "1.1.0", - "@algolia/client-abtesting": "5.35.0", - "@algolia/client-analytics": "5.35.0", - "@algolia/client-common": "5.35.0", - "@algolia/client-insights": "5.35.0", - "@algolia/client-personalization": "5.35.0", - "@algolia/client-query-suggestions": "5.35.0", - "@algolia/client-search": "5.35.0", - "@algolia/ingestion": "1.35.0", - 
"@algolia/monitoring": "1.35.0", - "@algolia/recommend": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/ansi-escapes": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.2.0.tgz", - "integrity": "sha512-g6LhBsl+GBPRWGWsBtutpzBYuIIdBkLEvad5C/va/74Db018+5TZiyA26cZJAr3Rft5lprVqOIPxf5Vid6tqAw==", - "dev": true, - "license": "MIT", - "dependencies": { - "environment": "^1.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ansi-regex": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", - "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/ansi-styles": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", - "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/anymatch": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", - "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", - "dev": true, - "license": "ISC", - "dependencies": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/anymatch/node_modules/picomatch": { - "version": "2.3.1", - "resolved": 
"https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "dev": true, - "license": "MIT" - }, - "node_modules/base64id": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/base64id/-/base64id-2.0.0.tgz", - "integrity": "sha512-lGe34o6EHj9y3Kts9R4ZYs/Gr+6N7MCaMlIFA3F1R2O5/m7K06AxfSeO5530PEERE6/WyEg3lsuyw4GHlPZHog==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^4.5.0 || >= 5.9" - } - }, - "node_modules/baseline-browser-mapping": { - "version": "2.8.25", - "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.8.25.tgz", - "integrity": "sha512-2NovHVesVF5TXefsGX1yzx1xgr7+m9JQenvz6FQY3qd+YXkKkYiv+vTCc7OriP9mcDZpTC5mAOYN4ocd29+erA==", - "dev": true, - "license": "Apache-2.0", - "bin": { - "baseline-browser-mapping": "dist/cli.js" - } - }, - "node_modules/beasties": { - "version": "0.3.5", - "resolved": "https://registry.npmjs.org/beasties/-/beasties-0.3.5.tgz", - "integrity": "sha512-NaWu+f4YrJxEttJSm16AzMIFtVldCvaJ68b1L098KpqXmxt9xOLtKoLkKxb8ekhOrLqEJAbvT6n6SEvB/sac7A==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "css-select": "^6.0.0", - "css-what": "^7.0.0", - "dom-serializer": "^2.0.0", - "domhandler": "^5.0.3", - "htmlparser2": "^10.0.0", - "picocolors": "^1.1.1", - "postcss": "^8.4.49", - "postcss-media-query-parser": "^0.2.3" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/binary-extensions": { - "version": "2.3.0", - "resolved": 
"https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", - "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/body-parser": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", - "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", - "dev": true, - "license": "MIT", - "dependencies": { - "bytes": "^3.1.2", - "content-type": "^1.0.5", - "debug": "^4.4.0", - "http-errors": "^2.0.0", - "iconv-lite": "^0.6.3", - "on-finished": "^2.4.1", - "qs": "^6.14.0", - "raw-body": "^3.0.0", - "type-is": "^2.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/body-parser/node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "dev": true, - "license": "MIT", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/boolbase": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", - "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==", - "dev": true, - "license": "ISC" - }, - "node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/braces": { 
- "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dev": true, - "license": "MIT", - "dependencies": { - "fill-range": "^7.1.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/browserslist": { - "version": "4.27.0", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.27.0.tgz", - "integrity": "sha512-AXVQwdhot1eqLihwasPElhX2tAZiBjWdJ9i/Zcj2S6QYIjkx62OKSfnobkriB81C3l4w0rVy3Nt4jaTBltYEpw==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "peer": true, - "dependencies": { - "baseline-browser-mapping": "^2.8.19", - "caniuse-lite": "^1.0.30001751", - "electron-to-chromium": "^1.5.238", - "node-releases": "^2.0.26", - "update-browserslist-db": "^1.1.4" - }, - "bin": { - "browserslist": "cli.js" - }, - "engines": { - "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" - } - }, - "node_modules/buffer-from": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", - "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/bytes": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", - "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/cacache": { - "version": "19.0.1", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-19.0.1.tgz", - "integrity": 
"sha512-hdsUxulXCi5STId78vRVYEtDAjq99ICAUktLTeTYsLoTE6Z8dS0c8pWNCxwdrk9YfJeobDZc2Y186hD/5ZQgFQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/fs": "^4.0.0", - "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^10.0.1", - "minipass": "^7.0.3", - "minipass-collect": "^2.0.1", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "p-map": "^7.0.2", - "ssri": "^12.0.0", - "tar": "^7.4.3", - "unique-filename": "^4.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/cacache/node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/cacache/node_modules/chownr": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", - "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", - "dev": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=18" - } - }, - "node_modules/cacache/node_modules/glob": { - "version": "10.4.5", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", - "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", - "dev": true, - "license": "ISC", - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/cacache/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - 
"integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/cacache/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/cacache/node_modules/tar": { - "version": "7.5.2", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.2.tgz", - "integrity": "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "@isaacs/fs-minipass": "^4.0.0", - "chownr": "^3.0.0", - "minipass": "^7.1.2", - "minizlib": "^3.1.0", - "yallist": "^5.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/cacache/node_modules/yallist": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", - "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", - "dev": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=18" - } - }, - "node_modules/call-bind-apply-helpers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", - "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/call-bound": { - "version": "1.0.4", - "resolved": 
"https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", - "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "get-intrinsic": "^1.3.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/caniuse-lite": { - "version": "1.0.30001754", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001754.tgz", - "integrity": "sha512-x6OeBXueoAceOmotzx3PO4Zpt4rzpeIFsSr6AAePTZxSkXiYDUmpypEl7e2+8NCd9bD7bXjqyef8CJYPC1jfxg==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/caniuse-lite" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "CC-BY-4.0" - }, - "node_modules/chalk": { - "version": "5.6.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.6.2.tgz", - "integrity": "sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^12.17.0 || ^14.13 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/chardet": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/chardet/-/chardet-2.1.1.tgz", - "integrity": "sha512-PsezH1rqdV9VvyNhxxOW32/d75r01NY7TQCmOqomRo15ZSOKbpTFVsfjghxo6JloQUCGnH4k1LGu0R4yCLlWQQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/chokidar": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", - "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", - "dev": true, - "license": "MIT", - "dependencies": { - "readdirp": "^4.0.1" - }, - 
"engines": { - "node": ">= 14.16.0" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/cli-cursor": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz", - "integrity": "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==", - "dev": true, - "license": "MIT", - "dependencies": { - "restore-cursor": "^5.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cli-spinners": { - "version": "2.9.2", - "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", - "integrity": "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cli-truncate": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-4.0.0.tgz", - "integrity": "sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA==", - "dev": true, - "license": "MIT", - "dependencies": { - "slice-ansi": "^5.0.0", - "string-width": "^7.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cli-width": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-4.1.0.tgz", - "integrity": "sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==", - "dev": true, - 
"license": "ISC", - "engines": { - "node": ">= 12" - } - }, - "node_modules/cliui": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-9.0.1.tgz", - "integrity": "sha512-k7ndgKhwoQveBL+/1tqGJYNz097I7WOvwbmmU2AR5+magtbjPWQTS1C5vzGkBC8Ym8UWRzfKUzUUqFLypY4Q+w==", - "dev": true, - "license": "ISC", - "dependencies": { - "string-width": "^7.2.0", - "strip-ansi": "^7.1.0", - "wrap-ansi": "^9.0.0" - }, - "engines": { - "node": ">=20" - } - }, - "node_modules/cliui/node_modules/wrap-ansi": { - "version": "9.0.2", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz", - "integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^6.2.1", - "string-width": "^7.0.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true, - "license": "MIT" - }, - "node_modules/colorette": { - "version": "2.0.20", - "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", - "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", - "dev": true, - "license": "MIT" - }, - "node_modules/concat-map": { - "version": "0.0.1", - 
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", - "dev": true, - "license": "MIT" - }, - "node_modules/connect": { - "version": "3.7.0", - "resolved": "https://registry.npmjs.org/connect/-/connect-3.7.0.tgz", - "integrity": "sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "2.6.9", - "finalhandler": "1.1.2", - "parseurl": "~1.3.3", - "utils-merge": "1.0.1" - }, - "engines": { - "node": ">= 0.10.0" - } - }, - "node_modules/connect/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/connect/node_modules/encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/connect/node_modules/finalhandler": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", - "integrity": "sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "2.6.9", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "on-finished": "~2.3.0", - "parseurl": "~1.3.3", - "statuses": "~1.5.0", - "unpipe": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/connect/node_modules/ms": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true, - "license": "MIT" - }, - "node_modules/connect/node_modules/on-finished": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", - "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", - "dev": true, - "license": "MIT", - "dependencies": { - "ee-first": "1.1.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/connect/node_modules/statuses": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", - "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/content-disposition": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", - "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", - "dev": true, - "license": "MIT", - "dependencies": { - "safe-buffer": "5.2.1" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/content-type": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", - "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/convert-source-map": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", - "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", - "dev": true, - "license": "MIT" - }, - "node_modules/cookie": { - 
"version": "0.7.2", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", - "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/cookie-signature": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", - "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.6.0" - } - }, - "node_modules/cors": { - "version": "2.8.5", - "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", - "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", - "dev": true, - "license": "MIT", - "dependencies": { - "object-assign": "^4", - "vary": "^1" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/cross-spawn": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", - "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", - "dev": true, - "license": "MIT", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/css-select": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/css-select/-/css-select-6.0.0.tgz", - "integrity": "sha512-rZZVSLle8v0+EY8QAkDWrKhpgt6SA5OtHsgBnsj6ZaLb5dmDVOWUDtQitd9ydxxvEjhewNudS6eTVU7uOyzvXw==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "boolbase": "^1.0.0", - "css-what": "^7.0.0", - "domhandler": "^5.0.3", - "domutils": "^3.2.2", - "nth-check": "^2.1.1" - }, - "funding": { - "url": "https://github.com/sponsors/fb55" - } - }, - "node_modules/css-what": { - "version": "7.0.0", - "resolved": 
"https://registry.npmjs.org/css-what/-/css-what-7.0.0.tgz", - "integrity": "sha512-wD5oz5xibMOPHzy13CyGmogB3phdvcDaB5t0W/Nr5Z2O/agcB8YwOz6e2Lsp10pNDzBoDO9nVa3RGs/2BttpHQ==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">= 6" - }, - "funding": { - "url": "https://github.com/sponsors/fb55" - } - }, - "node_modules/custom-event": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/custom-event/-/custom-event-1.0.1.tgz", - "integrity": "sha512-GAj5FOq0Hd+RsCGVJxZuKaIDXDf3h6GQoNEjFgbLLI/trgtavwUbSnZ5pVfg27DVCaWjIohryS0JFwIJyT2cMg==", - "dev": true, - "license": "MIT" - }, - "node_modules/date-format": { - "version": "4.0.14", - "resolved": "https://registry.npmjs.org/date-format/-/date-format-4.0.14.tgz", - "integrity": "sha512-39BOQLs9ZjKh0/patS9nrT8wc3ioX3/eA/zgbKNopnF2wCqJEoxywwwElATYvRsXdnOxA/OQeQoFZ3rFjVajhg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4.0" - } - }, - "node_modules/debug": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", - "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/destroy": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", - "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || 
>= 1.4.16" - } - }, - "node_modules/detect-libc": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", - "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", - "dev": true, - "license": "Apache-2.0", - "optional": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/di": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/di/-/di-0.0.1.tgz", - "integrity": "sha512-uJaamHkagcZtHPqCIHZxnFrXlunQXgBOsZSUOWwFw31QJCAbyTBoHMW75YOTur5ZNx8pIeAKgf6GWIgaqqiLhA==", - "dev": true, - "license": "MIT" - }, - "node_modules/dom-serialize": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/dom-serialize/-/dom-serialize-2.2.1.tgz", - "integrity": "sha512-Yra4DbvoW7/Z6LBN560ZwXMjoNOSAN2wRsKFGc4iBeso+mpIA6qj1vfdf9HpMaKAqG6wXTy+1SYEzmNpKXOSsQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "custom-event": "~1.0.0", - "ent": "~2.2.0", - "extend": "^3.0.0", - "void-elements": "^2.0.0" - } - }, - "node_modules/dom-serializer": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", - "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", - "dev": true, - "license": "MIT", - "dependencies": { - "domelementtype": "^2.3.0", - "domhandler": "^5.0.2", - "entities": "^4.2.0" - }, - "funding": { - "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" - } - }, - "node_modules/domelementtype": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", - "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/fb55" - } - ], - "license": "BSD-2-Clause" - }, - "node_modules/domhandler": { - "version": "5.0.3", - "resolved": 
"https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", - "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "domelementtype": "^2.3.0" - }, - "engines": { - "node": ">= 4" - }, - "funding": { - "url": "https://github.com/fb55/domhandler?sponsor=1" - } - }, - "node_modules/domutils": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz", - "integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "dom-serializer": "^2.0.0", - "domelementtype": "^2.3.0", - "domhandler": "^5.0.3" - }, - "funding": { - "url": "https://github.com/fb55/domutils?sponsor=1" - } - }, - "node_modules/dunder-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", - "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.1", - "es-errors": "^1.3.0", - "gopd": "^1.2.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", - "dev": true, - "license": "MIT" - }, - "node_modules/ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", - "dev": true, - "license": "MIT" - }, - "node_modules/electron-to-chromium": { - "version": "1.5.249", - "resolved": 
"https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.249.tgz", - "integrity": "sha512-5vcfL3BBe++qZ5kuFhD/p8WOM1N9m3nwvJPULJx+4xf2usSlZFJ0qoNYO2fOX4hi3ocuDcmDobtA+5SFr4OmBg==", - "dev": true, - "license": "ISC" - }, - "node_modules/emoji-regex": { - "version": "10.6.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz", - "integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==", - "dev": true, - "license": "MIT" - }, - "node_modules/encodeurl": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", - "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/encoding": { - "version": "0.1.13", - "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", - "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", - "dev": true, - "license": "MIT", - "optional": true, - "dependencies": { - "iconv-lite": "^0.6.2" - } - }, - "node_modules/encoding/node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "dev": true, - "license": "MIT", - "optional": true, - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/engine.io": { - "version": "6.6.4", - "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.6.4.tgz", - "integrity": "sha512-ZCkIjSYNDyGn0R6ewHDtXgns/Zre/NT6Agvq1/WobF7JXgFff4SeDroKiCO3fNJreU9YG429Sc81o4w5ok/W5g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/cors": "^2.8.12", - "@types/node": ">=10.0.0", - "accepts": 
"~1.3.4", - "base64id": "2.0.0", - "cookie": "~0.7.2", - "cors": "~2.8.5", - "debug": "~4.3.1", - "engine.io-parser": "~5.2.1", - "ws": "~8.17.1" - }, - "engines": { - "node": ">=10.2.0" - } - }, - "node_modules/engine.io-parser": { - "version": "5.2.3", - "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.2.3.tgz", - "integrity": "sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/engine.io/node_modules/accepts": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", - "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", - "dev": true, - "license": "MIT", - "dependencies": { - "mime-types": "~2.1.34", - "negotiator": "0.6.3" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/engine.io/node_modules/debug": { - "version": "4.3.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", - "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/engine.io/node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/engine.io/node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": 
"sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "dev": true, - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/engine.io/node_modules/negotiator": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", - "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/ent": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.2.tgz", - "integrity": "sha512-kKvD1tO6BM+oK9HzCPpUdRb4vKFQY/FPTFmurMvh6LlN68VMrdj77w8yp51/kDbpkFOS9J8w5W6zIzgM2H8/hw==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3", - "es-errors": "^1.3.0", - "punycode": "^1.4.1", - "safe-regex-test": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/entities": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", - "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/env-paths": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", - "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/environment": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/environment/-/environment-1.1.0.tgz", - "integrity": "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==", - "dev": true, - "license": 
"MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/err-code": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", - "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", - "dev": true, - "license": "MIT" - }, - "node_modules/es-define-property": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", - "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-errors": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", - "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-object-atoms": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", - "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", - "dev": true, - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/esbuild": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.9.tgz", - "integrity": "sha512-CRbODhYyQx3qp7ZEwzxOk4JBqmD/seJrzPa/cGjY1VtIn5E09Oi9/dB4JwctnfZ8Q8iT7rioVv5k/FNT/uf54g==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=18" - }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.25.9", - "@esbuild/android-arm": "0.25.9", - "@esbuild/android-arm64": "0.25.9", - "@esbuild/android-x64": 
"0.25.9", - "@esbuild/darwin-arm64": "0.25.9", - "@esbuild/darwin-x64": "0.25.9", - "@esbuild/freebsd-arm64": "0.25.9", - "@esbuild/freebsd-x64": "0.25.9", - "@esbuild/linux-arm": "0.25.9", - "@esbuild/linux-arm64": "0.25.9", - "@esbuild/linux-ia32": "0.25.9", - "@esbuild/linux-loong64": "0.25.9", - "@esbuild/linux-mips64el": "0.25.9", - "@esbuild/linux-ppc64": "0.25.9", - "@esbuild/linux-riscv64": "0.25.9", - "@esbuild/linux-s390x": "0.25.9", - "@esbuild/linux-x64": "0.25.9", - "@esbuild/netbsd-arm64": "0.25.9", - "@esbuild/netbsd-x64": "0.25.9", - "@esbuild/openbsd-arm64": "0.25.9", - "@esbuild/openbsd-x64": "0.25.9", - "@esbuild/openharmony-arm64": "0.25.9", - "@esbuild/sunos-x64": "0.25.9", - "@esbuild/win32-arm64": "0.25.9", - "@esbuild/win32-ia32": "0.25.9", - "@esbuild/win32-x64": "0.25.9" - } - }, - "node_modules/escalade": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", - "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/escape-html": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", - "dev": true, - "license": "MIT" - }, - "node_modules/etag": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/eventemitter3": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", - "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", - "dev": true, - 
"license": "MIT" - }, - "node_modules/eventsource": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.7.tgz", - "integrity": "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==", - "dev": true, - "license": "MIT", - "dependencies": { - "eventsource-parser": "^3.0.1" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/eventsource-parser": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.6.tgz", - "integrity": "sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/exponential-backoff": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/exponential-backoff/-/exponential-backoff-3.1.3.tgz", - "integrity": "sha512-ZgEeZXj30q+I0EN+CbSSpIyPaJ5HVQD18Z1m+u1FXbAeT94mr1zw50q4q6jiiC447Nl/YTcIYSAftiGqetwXCA==", - "dev": true, - "license": "Apache-2.0" - }, - "node_modules/express": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", - "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "accepts": "^2.0.0", - "body-parser": "^2.2.0", - "content-disposition": "^1.0.0", - "content-type": "^1.0.5", - "cookie": "^0.7.1", - "cookie-signature": "^1.2.1", - "debug": "^4.4.0", - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "etag": "^1.8.1", - "finalhandler": "^2.1.0", - "fresh": "^2.0.0", - "http-errors": "^2.0.0", - "merge-descriptors": "^2.0.0", - "mime-types": "^3.0.0", - "on-finished": "^2.4.1", - "once": "^1.4.0", - "parseurl": "^1.3.3", - "proxy-addr": "^2.0.7", - "qs": "^6.14.0", - "range-parser": "^1.2.1", - "router": "^2.2.0", - "send": "^1.1.0", - "serve-static": "^2.2.0", - 
"statuses": "^2.0.1", - "type-is": "^2.0.1", - "vary": "^1.1.2" - }, - "engines": { - "node": ">= 18" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/express" - } - }, - "node_modules/express-rate-limit": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.1.tgz", - "integrity": "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 16" - }, - "funding": { - "url": "https://github.com/sponsors/express-rate-limit" - }, - "peerDependencies": { - "express": ">= 4.11" - } - }, - "node_modules/extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", - "dev": true, - "license": "MIT" - }, - "node_modules/fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true, - "license": "MIT" - }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true, - "license": "MIT" - }, - "node_modules/fast-uri": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz", - "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/fastify" - }, - { - "type": "opencollective", - "url": 
"https://opencollective.com/fastify" - } - ], - "license": "BSD-3-Clause" - }, - "node_modules/fdir": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", - "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12.0.0" - }, - "peerDependencies": { - "picomatch": "^3 || ^4" - }, - "peerDependenciesMeta": { - "picomatch": { - "optional": true - } - } - }, - "node_modules/fill-range": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dev": true, - "license": "MIT", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/finalhandler": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", - "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^4.4.0", - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "on-finished": "^2.4.1", - "parseurl": "^1.3.3", - "statuses": "^2.0.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/flatted": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", - "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", - "dev": true, - "license": "ISC" - }, - "node_modules/follow-redirects": { - "version": "1.15.11", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", - "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", - "dev": true, - "funding": [ - { - "type": "individual", - "url": 
"https://github.com/sponsors/RubenVerborgh" - } - ], - "license": "MIT", - "engines": { - "node": ">=4.0" - }, - "peerDependenciesMeta": { - "debug": { - "optional": true - } - } - }, - "node_modules/foreground-child": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", - "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", - "dev": true, - "license": "ISC", - "dependencies": { - "cross-spawn": "^7.0.6", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/forwarded": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", - "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/fresh": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", - "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/fs-extra": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz", - "integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==", - "dev": true, - "license": "MIT", - "dependencies": { - "graceful-fs": "^4.2.0", - "jsonfile": "^4.0.0", - "universalify": "^0.1.0" - }, - "engines": { - "node": ">=6 <7 || >=8" - } - }, - "node_modules/fs-minipass": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-3.0.3.tgz", - "integrity": "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw==", - "dev": true, - "license": 
"ISC", - "dependencies": { - "minipass": "^7.0.3" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", - "dev": true, - "license": "ISC" - }, - "node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/gensync": { - "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "dev": true, - "license": "ISC", - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, - "node_modules/get-east-asian-width": { - "version": "1.4.0", - "resolved": 
"https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz", - "integrity": "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/get-intrinsic": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", - "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "es-define-property": "^1.0.1", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.1.1", - "function-bind": "^1.1.2", - "get-proto": "^1.0.1", - "gopd": "^1.2.0", - "has-symbols": "^1.1.0", - "hasown": "^2.0.2", - "math-intrinsics": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", - "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", - "dev": true, - "license": "MIT", - "dependencies": { - "dunder-proto": "^1.0.1", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "dev": true, - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - 
"funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/glob-to-regexp": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", - "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", - "dev": true, - "license": "BSD-2-Clause" - }, - "node_modules/gopd": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", - "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/has-symbols": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", - "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", - "dev": true, - "license": 
"MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-tostringtag": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", - "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", - "dev": true, - "license": "MIT", - "dependencies": { - "has-symbols": "^1.0.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/hasown": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/hosted-git-info": { - "version": "9.0.2", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-9.0.2.tgz", - "integrity": "sha512-M422h7o/BR3rmCQ8UHi7cyyMqKltdP9Uo+J2fXK+RSAY+wTcKOIRyhTuKv4qn+DJf3g+PL890AzId5KZpX+CBg==", - "dev": true, - "license": "ISC", - "dependencies": { - "lru-cache": "^11.1.0" - }, - "engines": { - "node": "^20.17.0 || >=22.9.0" - } - }, - "node_modules/hosted-git-info/node_modules/lru-cache": { - "version": "11.2.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.2.tgz", - "integrity": "sha512-F9ODfyqML2coTIsQpSkRHnLSZMtkU8Q+mSfcaIyKwy58u+8k5nvAYeiNhsyMARvzNcXJ9QfWVrcPsC9e9rAxtg==", - "dev": true, - "license": "ISC", - "engines": { - "node": "20 || >=22" - } - }, - "node_modules/html-escaper": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", - "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", - "dev": true, - "license": "MIT" - }, - 
"node_modules/htmlparser2": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-10.0.0.tgz", - "integrity": "sha512-TwAZM+zE5Tq3lrEHvOlvwgj1XLWQCtaaibSN11Q+gGBAS7Y1uZSWwXXRe4iF6OXnaq1riyQAPFOBtYc77Mxq0g==", - "dev": true, - "funding": [ - "https://github.com/fb55/htmlparser2?sponsor=1", - { - "type": "github", - "url": "https://github.com/sponsors/fb55" - } - ], - "license": "MIT", - "dependencies": { - "domelementtype": "^2.3.0", - "domhandler": "^5.0.3", - "domutils": "^3.2.1", - "entities": "^6.0.0" - } - }, - "node_modules/htmlparser2/node_modules/entities": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz", - "integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/http-cache-semantics": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.2.0.tgz", - "integrity": "sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ==", - "dev": true, - "license": "BSD-2-Clause" - }, - "node_modules/http-errors": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", - "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "depd": "2.0.0", - "inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "toidentifier": "1.0.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/http-errors/node_modules/statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": 
"sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/http-proxy": { - "version": "1.18.1", - "resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz", - "integrity": "sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "eventemitter3": "^4.0.0", - "follow-redirects": "^1.0.0", - "requires-port": "^1.0.0" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/http-proxy-agent": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", - "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", - "dev": true, - "license": "MIT", - "dependencies": { - "agent-base": "^7.1.0", - "debug": "^4.3.4" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/https-proxy-agent": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", - "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", - "dev": true, - "license": "MIT", - "dependencies": { - "agent-base": "^7.1.2", - "debug": "4" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/iconv-lite": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.0.tgz", - "integrity": "sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - }, - "engines": { - "node": ">=0.10.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/express" - } - }, - "node_modules/ignore-walk": { - "version": "8.0.0", - "resolved": 
"https://registry.npmjs.org/ignore-walk/-/ignore-walk-8.0.0.tgz", - "integrity": "sha512-FCeMZT4NiRQGh+YkeKMtWrOmBgWjHjMJ26WQWrRQyoyzqevdaGSakUaJW5xQYmjLlUVk2qUnCjYVBax9EKKg8A==", - "dev": true, - "license": "ISC", - "dependencies": { - "minimatch": "^10.0.3" - }, - "engines": { - "node": "^20.17.0 || >=22.9.0" - } - }, - "node_modules/ignore-walk/node_modules/minimatch": { - "version": "10.1.1", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.1.tgz", - "integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "@isaacs/brace-expansion": "^5.0.0" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/immutable": { - "version": "5.1.4", - "resolved": "https://registry.npmjs.org/immutable/-/immutable-5.1.4.tgz", - "integrity": "sha512-p6u1bG3YSnINT5RQmx/yRZBpenIl30kVxkTLDyHLIMk0gict704Q9n+thfDI7lTRm9vXdDYutVzXhzcThxTnXA==", - "dev": true, - "license": "MIT" - }, - "node_modules/imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.8.19" - } - }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", - "dev": true, - "license": "ISC", - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/ini": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ini/-/ini-5.0.0.tgz", - "integrity": "sha512-+N0ngpO3e7cRUWOJAS7qw0IZIVc6XPrW4MlFBdD066F2L4k1L6ker3hLqSq7iXxU5tgS4WGkIUElWn5vogAEnw==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/ip-address": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.1.0.tgz", - "integrity": "sha512-XXADHxXmvT9+CRxhXg56LJovE+bmWnEWB78LB83VZTprKTmaC5QfruXocxzTZ2Kl0DNwKuBdlIhjL8LeY8Sf8Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 12" - } - }, - "node_modules/ipaddr.js": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", - "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/is-binary-path": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", - "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", - "dev": true, - "license": "MIT", - "dependencies": { - "binary-extensions": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-core-module": { - "version": "2.16.1", - "resolved": 
"https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", - "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", - "dev": true, - "license": "MIT", - "dependencies": { - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-4.0.0.tgz", - "integrity": "sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-interactive": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-2.0.0.tgz", - "integrity": "sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": 
"https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/is-promise": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", - "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/is-regex": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", - "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "gopd": "^1.2.0", - "has-tostringtag": "^1.0.2", - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-unicode-supported": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-2.1.0.tgz", - "integrity": "sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/isbinaryfile": { - "version": "4.0.10", - "resolved": "https://registry.npmjs.org/isbinaryfile/-/isbinaryfile-4.0.10.tgz", - "integrity": "sha512-iHrqe5shvBUcFbmZq9zOQHBoeOhZJu6RQGrDpBgenUm/Am+F3JM2MgQj+rK3Z601fzrL5gLZWtAPH2OBaSVcyw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/gjtorikian/" - } - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - 
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true, - "license": "ISC" - }, - "node_modules/istanbul-lib-coverage": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", - "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=8" - } - }, - "node_modules/istanbul-lib-instrument": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz", - "integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "@babel/core": "^7.23.9", - "@babel/parser": "^7.23.9", - "@istanbuljs/schema": "^0.1.3", - "istanbul-lib-coverage": "^3.2.0", - "semver": "^7.5.4" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/istanbul-lib-report": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", - "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "istanbul-lib-coverage": "^3.0.0", - "make-dir": "^4.0.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/istanbul-lib-source-maps": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", - "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "debug": "^4.1.1", - "istanbul-lib-coverage": "^3.0.0", - "source-map": "^0.6.1" - }, - "engines": { - "node": ">=10" 
- } - }, - "node_modules/istanbul-lib-source-maps/node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/istanbul-reports": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.2.0.tgz", - "integrity": "sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "html-escaper": "^2.0.0", - "istanbul-lib-report": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/jackspeak": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", - "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" - } - }, - "node_modules/jasmine-core": { - "version": "5.9.0", - "resolved": "https://registry.npmjs.org/jasmine-core/-/jasmine-core-5.9.0.tgz", - "integrity": "sha512-OMUvF1iI6+gSRYOhMrH4QYothVLN9C3EJ6wm4g7zLJlnaTl8zbaPOr0bTw70l7QxkoM7sVFOWo83u9B2Fe2Zng==", - "dev": true, - "license": "MIT", - "peer": true - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/jsesc": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", - 
"integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", - "dev": true, - "license": "MIT", - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/json-parse-even-better-errors": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-4.0.0.tgz", - "integrity": "sha512-lR4MXjGNgkJc7tkQ97kb2nuEMnNCyU//XYVH0MKTGcXEiSudQ5MKGKen3C5QubYy0vmq+JGitUg92uuywGEwIA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "dev": true, - "license": "MIT" - }, - "node_modules/json5": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", - "dev": true, - "license": "MIT", - "bin": { - "json5": "lib/cli.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/jsonc-parser": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.3.1.tgz", - "integrity": "sha512-HUgH65KyejrUFPvHFPbqOY0rsFip3Bo5wb4ngvdi1EpCYWUQDC5V+Y7mZws+DLkr4M//zQJoanu1SP+87Dv1oQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/jsonfile": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", - "integrity": "sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==", - "dev": true, - "license": "MIT", - "optionalDependencies": { - "graceful-fs": "^4.1.6" - } - }, - "node_modules/jsonparse": { - "version": "1.3.1", - "resolved": 
"https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz", - "integrity": "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==", - "dev": true, - "engines": [ - "node >= 0.2.0" - ], - "license": "MIT" - }, - "node_modules/karma": { - "version": "6.4.4", - "resolved": "https://registry.npmjs.org/karma/-/karma-6.4.4.tgz", - "integrity": "sha512-LrtUxbdvt1gOpo3gxG+VAJlJAEMhbWlM4YrFQgql98FwF7+K8K12LYO4hnDdUkNjeztYrOXEMqgTajSWgmtI/w==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "@colors/colors": "1.5.0", - "body-parser": "^1.19.0", - "braces": "^3.0.2", - "chokidar": "^3.5.1", - "connect": "^3.7.0", - "di": "^0.0.1", - "dom-serialize": "^2.2.1", - "glob": "^7.1.7", - "graceful-fs": "^4.2.6", - "http-proxy": "^1.18.1", - "isbinaryfile": "^4.0.8", - "lodash": "^4.17.21", - "log4js": "^6.4.1", - "mime": "^2.5.2", - "minimatch": "^3.0.4", - "mkdirp": "^0.5.5", - "qjobs": "^1.2.0", - "range-parser": "^1.2.1", - "rimraf": "^3.0.2", - "socket.io": "^4.7.2", - "source-map": "^0.6.1", - "tmp": "^0.2.1", - "ua-parser-js": "^0.7.30", - "yargs": "^16.1.1" - }, - "bin": { - "karma": "bin/karma" - }, - "engines": { - "node": ">= 10" - } - }, - "node_modules/karma-chrome-launcher": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/karma-chrome-launcher/-/karma-chrome-launcher-3.2.0.tgz", - "integrity": "sha512-rE9RkUPI7I9mAxByQWkGJFXfFD6lE4gC5nPuZdobf/QdTEJI6EU4yIay/cfU/xV4ZxlM5JiTv7zWYgA64NpS5Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "which": "^1.2.1" - } - }, - "node_modules/karma-chrome-launcher/node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "which": "bin/which" - } - }, - 
"node_modules/karma-coverage": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/karma-coverage/-/karma-coverage-2.2.1.tgz", - "integrity": "sha512-yj7hbequkQP2qOSb20GuNSIyE//PgJWHwC2IydLE6XRtsnaflv+/OSGNssPjobYUlhVVagy99TQpqUt3vAUG7A==", - "dev": true, - "license": "MIT", - "dependencies": { - "istanbul-lib-coverage": "^3.2.0", - "istanbul-lib-instrument": "^5.1.0", - "istanbul-lib-report": "^3.0.0", - "istanbul-lib-source-maps": "^4.0.1", - "istanbul-reports": "^3.0.5", - "minimatch": "^3.0.4" - }, - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/karma-coverage/node_modules/istanbul-lib-instrument": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", - "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "@babel/core": "^7.12.3", - "@babel/parser": "^7.14.7", - "@istanbuljs/schema": "^0.1.2", - "istanbul-lib-coverage": "^3.2.0", - "semver": "^6.3.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/karma-coverage/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/karma-jasmine": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/karma-jasmine/-/karma-jasmine-5.1.0.tgz", - "integrity": "sha512-i/zQLFrfEpRyQoJF9fsCdTMOF5c2dK7C7OmsuKg2D0YSsuZSfQDiLuaiktbuio6F2wiCsZSnSnieIQ0ant/uzQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "jasmine-core": "^4.1.0" - }, - "engines": { - "node": ">=12" - }, - "peerDependencies": { - "karma": "^6.0.0" - } - }, - "node_modules/karma-jasmine-html-reporter": { - "version": "2.1.0", - "resolved": 
"https://registry.npmjs.org/karma-jasmine-html-reporter/-/karma-jasmine-html-reporter-2.1.0.tgz", - "integrity": "sha512-sPQE1+nlsn6Hwb5t+HHwyy0A1FNCVKuL1192b+XNauMYWThz2kweiBVW1DqloRpVvZIJkIoHVB7XRpK78n1xbQ==", - "dev": true, - "license": "MIT", - "peerDependencies": { - "jasmine-core": "^4.0.0 || ^5.0.0", - "karma": "^6.0.0", - "karma-jasmine": "^5.0.0" - } - }, - "node_modules/karma-jasmine/node_modules/jasmine-core": { - "version": "4.6.1", - "resolved": "https://registry.npmjs.org/jasmine-core/-/jasmine-core-4.6.1.tgz", - "integrity": "sha512-VYz/BjjmC3klLJlLwA4Kw8ytk0zDSmbbDLNs794VnWmkcCB7I9aAL/D48VNQtmITyPvea2C3jdUMfc3kAoy0PQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/karma/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/karma/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/karma/node_modules/body-parser": { - "version": "1.20.3", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", - "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", - "dev": true, - "license": "MIT", - "dependencies": { - "bytes": "3.1.2", - "content-type": "~1.0.5", - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "on-finished": 
"2.4.1", - "qs": "6.13.0", - "raw-body": "2.5.2", - "type-is": "~1.6.18", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" - } - }, - "node_modules/karma/node_modules/chokidar": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", - "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", - "dev": true, - "license": "MIT", - "dependencies": { - "anymatch": "~3.1.2", - "braces": "~3.0.2", - "glob-parent": "~5.1.2", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.6.0" - }, - "engines": { - "node": ">= 8.10.0" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" - } - }, - "node_modules/karma/node_modules/cliui": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", - "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.0", - "wrap-ansi": "^7.0.0" - } - }, - "node_modules/karma/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/karma/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true, - "license": "MIT" - }, - "node_modules/karma/node_modules/iconv-lite": { - "version": "0.4.24", - "resolved": 
"https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "dev": true, - "license": "MIT", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/karma/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/karma/node_modules/media-typer": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/karma/node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/karma/node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "dev": true, - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/karma/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": 
"sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true, - "license": "MIT" - }, - "node_modules/karma/node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/karma/node_modules/qs": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", - "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "side-channel": "^1.0.6" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/karma/node_modules/raw-body": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", - "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", - "dev": true, - "license": "MIT", - "dependencies": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/karma/node_modules/readdirp": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", - "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", - "dev": true, - "license": "MIT", - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/karma/node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": 
"sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/karma/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/karma/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/karma/node_modules/type-is": { - "version": "1.6.18", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", - "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", - "dev": true, - "license": "MIT", - "dependencies": { - "media-typer": "0.3.0", - "mime-types": "~2.1.24" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/karma/node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - 
"node_modules/karma/node_modules/yargs": { - "version": "16.2.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", - "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", - "dev": true, - "license": "MIT", - "dependencies": { - "cliui": "^7.0.2", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.0", - "y18n": "^5.0.5", - "yargs-parser": "^20.2.2" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/karma/node_modules/yargs-parser": { - "version": "20.2.9", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", - "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/listr2": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/listr2/-/listr2-9.0.1.tgz", - "integrity": "sha512-SL0JY3DaxylDuo/MecFeiC+7pedM0zia33zl0vcjgwcq1q1FWWF1To9EIauPbl8GbMCU0R2e0uJ8bZunhYKD2g==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "cli-truncate": "^4.0.0", - "colorette": "^2.0.20", - "eventemitter3": "^5.0.1", - "log-update": "^6.1.0", - "rfdc": "^1.4.1", - "wrap-ansi": "^9.0.0" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/listr2/node_modules/eventemitter3": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", - "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", - "dev": true, - "license": "MIT" - }, - "node_modules/listr2/node_modules/wrap-ansi": { - "version": "9.0.2", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz", - "integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==", - "dev": true, - "license": "MIT", 
- "dependencies": { - "ansi-styles": "^6.2.1", - "string-width": "^7.0.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/lmdb": { - "version": "3.4.2", - "resolved": "https://registry.npmjs.org/lmdb/-/lmdb-3.4.2.tgz", - "integrity": "sha512-nwVGUfTBUwJKXd6lRV8pFNfnrCC1+l49ESJRM19t/tFb/97QfJEixe5DYRvug5JO7DSFKoKaVy7oGMt5rVqZvg==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "dependencies": { - "msgpackr": "^1.11.2", - "node-addon-api": "^6.1.0", - "node-gyp-build-optional-packages": "5.2.2", - "ordered-binary": "^1.5.3", - "weak-lru-cache": "^1.2.2" - }, - "bin": { - "download-lmdb-prebuilds": "bin/download-prebuilds.js" - }, - "optionalDependencies": { - "@lmdb/lmdb-darwin-arm64": "3.4.2", - "@lmdb/lmdb-darwin-x64": "3.4.2", - "@lmdb/lmdb-linux-arm": "3.4.2", - "@lmdb/lmdb-linux-arm64": "3.4.2", - "@lmdb/lmdb-linux-x64": "3.4.2", - "@lmdb/lmdb-win32-arm64": "3.4.2", - "@lmdb/lmdb-win32-x64": "3.4.2" - } - }, - "node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true, - "license": "MIT" - }, - "node_modules/log-symbols": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-6.0.0.tgz", - "integrity": "sha512-i24m8rpwhmPIS4zscNzK6MSEhk0DUWa/8iYQWxhffV8jkI4Phvs3F+quL5xvS0gdQR0FyTCMMH33Y78dDTzzIw==", - "dev": true, - "license": "MIT", - "dependencies": { - "chalk": "^5.3.0", - "is-unicode-supported": "^1.3.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/log-symbols/node_modules/is-unicode-supported": { - "version": "1.3.0", - "resolved": 
"https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-1.3.0.tgz", - "integrity": "sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/log-update": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/log-update/-/log-update-6.1.0.tgz", - "integrity": "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-escapes": "^7.0.0", - "cli-cursor": "^5.0.0", - "slice-ansi": "^7.1.0", - "strip-ansi": "^7.1.0", - "wrap-ansi": "^9.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/log-update/node_modules/is-fullwidth-code-point": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-5.1.0.tgz", - "integrity": "sha512-5XHYaSyiqADb4RnZ1Bdad6cPp8Toise4TzEjcOYDHZkTCbKgiUl7WTUCpNWHuxmDt91wnsZBc9xinNzopv3JMQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "get-east-asian-width": "^1.3.1" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/log-update/node_modules/slice-ansi": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-7.1.2.tgz", - "integrity": "sha512-iOBWFgUX7caIZiuutICxVgX1SdxwAVFFKwt1EvMYYec/NWO5meOJ6K5uQxhrYBdQJne4KxiqZc+KptFOWFSI9w==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^6.2.1", - "is-fullwidth-code-point": "^5.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/chalk/slice-ansi?sponsor=1" - } - }, - "node_modules/log-update/node_modules/wrap-ansi": { - "version": "9.0.2", - 
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz", - "integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^6.2.1", - "string-width": "^7.0.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/log4js": { - "version": "6.9.1", - "resolved": "https://registry.npmjs.org/log4js/-/log4js-6.9.1.tgz", - "integrity": "sha512-1somDdy9sChrr9/f4UlzhdaGfDR2c/SaD2a4T7qEkG4jTS57/B3qmnjLYePwQ8cqWnUHZI0iAKxMBpCZICiZ2g==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "date-format": "^4.0.14", - "debug": "^4.3.4", - "flatted": "^3.2.7", - "rfdc": "^1.3.0", - "streamroller": "^3.1.5" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^3.0.2" - } - }, - "node_modules/magic-string": { - "version": "0.30.17", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz", - "integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.0" - } - }, - "node_modules/make-dir": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", - "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", - "dev": true, - "license": "MIT", - "dependencies": { - "semver": "^7.5.3" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": 
"https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/make-fetch-happen": { - "version": "14.0.3", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-14.0.3.tgz", - "integrity": "sha512-QMjGbFTP0blj97EeidG5hk/QhKQ3T4ICckQGLgz38QF7Vgbk6e6FTARN8KhKxyBbWn8R0HU+bnw8aSoFPD4qtQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/agent": "^3.0.0", - "cacache": "^19.0.1", - "http-cache-semantics": "^4.1.1", - "minipass": "^7.0.2", - "minipass-fetch": "^4.0.0", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^1.0.0", - "proc-log": "^5.0.0", - "promise-retry": "^2.0.1", - "ssri": "^12.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/math-intrinsics": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", - "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/media-typer": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", - "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/merge-descriptors": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", - "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": 
"sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dev": true, - "license": "MIT", - "optional": true, - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/micromatch/node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "license": "MIT", - "optional": true, - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/mime": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", - "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", - "dev": true, - "license": "MIT", - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/mime-db": { - "version": "1.54.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", - "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mime-types": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", - "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", - "dev": true, - "license": "MIT", - "dependencies": { - "mime-db": "^1.54.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mimic-function": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz", - "integrity": "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==", - "dev": 
true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/minimist": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", - "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/minipass-collect": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-2.0.1.tgz", - "integrity": "sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw==", - "dev": true, - "license": "ISC", - "dependencies": { - "minipass": "^7.0.3" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/minipass-fetch": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-4.0.1.tgz", - "integrity": "sha512-j7U11C5HXigVuutxebFadoYBbd7VSdZWggSe64NVdvWNBqGAiXPL2QVCehjmw7lY1oF9gOllYbORh+hiNgfPgQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "minipass": "^7.0.3", - "minipass-sized": "^1.0.3", - "minizlib": "^3.0.1" - }, - "engines": { - 
"node": "^18.17.0 || >=20.5.0" - }, - "optionalDependencies": { - "encoding": "^0.1.13" - } - }, - "node_modules/minipass-flush": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", - "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", - "dev": true, - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/minipass-flush/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minipass-flush/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true, - "license": "ISC" - }, - "node_modules/minipass-pipeline": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz", - "integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==", - "dev": true, - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minipass-pipeline/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - 
"node_modules/minipass-pipeline/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true, - "license": "ISC" - }, - "node_modules/minipass-sized": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz", - "integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==", - "dev": true, - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minipass-sized/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minipass-sized/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true, - "license": "ISC" - }, - "node_modules/minizlib": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz", - "integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==", - "dev": true, - "license": "MIT", - "dependencies": { - "minipass": "^7.1.2" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/mkdirp": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", - "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", - "dev": true, - 
"license": "MIT", - "dependencies": { - "minimist": "^1.2.6" - }, - "bin": { - "mkdirp": "bin/cmd.js" - } - }, - "node_modules/mrmime": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.1.tgz", - "integrity": "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - } - }, - "node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dev": true, - "license": "MIT" - }, - "node_modules/msgpackr": { - "version": "1.11.5", - "resolved": "https://registry.npmjs.org/msgpackr/-/msgpackr-1.11.5.tgz", - "integrity": "sha512-UjkUHN0yqp9RWKy0Lplhh+wlpdt9oQBYgULZOiFhV3VclSF1JnSQWZ5r9gORQlNYaUKQoR8itv7g7z1xDDuACA==", - "dev": true, - "license": "MIT", - "optional": true, - "optionalDependencies": { - "msgpackr-extract": "^3.0.2" - } - }, - "node_modules/msgpackr-extract": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/msgpackr-extract/-/msgpackr-extract-3.0.3.tgz", - "integrity": "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "dependencies": { - "node-gyp-build-optional-packages": "5.2.2" - }, - "bin": { - "download-msgpackr-prebuilds": "bin/download-prebuilds.js" - }, - "optionalDependencies": { - "@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3", - "@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3", - "@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3", - "@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3", - "@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3", - "@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3" - } - }, - "node_modules/mute-stream": { - "version": "3.0.0", - 
"resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-3.0.0.tgz", - "integrity": "sha512-dkEJPVvun4FryqBmZ5KhDo0K9iDXAwn08tMLDinNdRBNPcYEDiWYysLcc6k3mjTMlbP9KyylvRpd4wFtwrT9rw==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^20.17.0 || >=22.9.0" - } - }, - "node_modules/nanoid": { - "version": "3.3.11", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", - "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, - "node_modules/negotiator": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", - "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/node-addon-api": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-6.1.0.tgz", - "integrity": "sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==", - "dev": true, - "license": "MIT", - "optional": true - }, - "node_modules/node-gyp": { - "version": "11.5.0", - "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-11.5.0.tgz", - "integrity": "sha512-ra7Kvlhxn5V9Slyus0ygMa2h+UqExPqUIkfk7Pc8QTLT956JLSy51uWFwHtIYy0vI8cB4BDhc/S03+880My/LQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "env-paths": "^2.2.0", - "exponential-backoff": "^3.1.1", - "graceful-fs": "^4.2.6", - "make-fetch-happen": "^14.0.3", - "nopt": "^8.0.0", - "proc-log": "^5.0.0", - "semver": "^7.3.5", - "tar": "^7.4.3", - "tinyglobby": "^0.2.12", - "which": "^5.0.0" - }, - "bin": { - "node-gyp": "bin/node-gyp.js" 
- }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/node-gyp-build-optional-packages": { - "version": "5.2.2", - "resolved": "https://registry.npmjs.org/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.2.2.tgz", - "integrity": "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==", - "dev": true, - "license": "MIT", - "optional": true, - "dependencies": { - "detect-libc": "^2.0.1" - }, - "bin": { - "node-gyp-build-optional-packages": "bin.js", - "node-gyp-build-optional-packages-optional": "optional.js", - "node-gyp-build-optional-packages-test": "build-test.js" - } - }, - "node_modules/node-gyp/node_modules/chownr": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", - "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", - "dev": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=18" - } - }, - "node_modules/node-gyp/node_modules/isexe": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", - "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16" - } - }, - "node_modules/node-gyp/node_modules/tar": { - "version": "7.5.2", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.2.tgz", - "integrity": "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "@isaacs/fs-minipass": "^4.0.0", - "chownr": "^3.0.0", - "minipass": "^7.1.2", - "minizlib": "^3.1.0", - "yallist": "^5.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/node-gyp/node_modules/which": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz", - "integrity": 
"sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^3.1.1" - }, - "bin": { - "node-which": "bin/which.js" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/node-gyp/node_modules/yallist": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", - "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", - "dev": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=18" - } - }, - "node_modules/node-releases": { - "version": "2.0.27", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", - "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", - "dev": true, - "license": "MIT" - }, - "node_modules/nopt": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-8.1.0.tgz", - "integrity": "sha512-ieGu42u/Qsa4TFktmaKEwM6MQH0pOWnaB3htzh0JRtx84+Mebc0cbZYN5bC+6WTZ4+77xrL9Pn5m7CV6VIkV7A==", - "dev": true, - "license": "ISC", - "dependencies": { - "abbrev": "^3.0.0" - }, - "bin": { - "nopt": "bin/nopt.js" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/npm-bundled": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-4.0.0.tgz", - "integrity": "sha512-IxaQZDMsqfQ2Lz37VvyyEtKLe8FsRZuysmedy/N06TU1RyVppYKXrO4xIhR0F+7ubIBox6Q7nir6fQI3ej39iA==", - "dev": true, - "license": "ISC", - "dependencies": { - "npm-normalize-package-bin": 
"^4.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm-install-checks": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-7.1.2.tgz", - "integrity": "sha512-z9HJBCYw9Zr8BqXcllKIs5nI+QggAImbBdHphOzVYrz2CB4iQ6FzWyKmlqDZua+51nAu7FcemlbTc9VgQN5XDQ==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "semver": "^7.1.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm-normalize-package-bin": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-4.0.0.tgz", - "integrity": "sha512-TZKxPvItzai9kN9H/TkmCtx/ZN/hvr3vUycjlfmH0ootY9yFBzNOpiXAdIn1Iteqsvk4lQn6B5PTrt+n6h8k/w==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm-package-arg": { - "version": "13.0.0", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.0.tgz", - "integrity": "sha512-+t2etZAGcB7TbbLHfDwooV9ppB2LhhcT6A+L9cahsf9mEUAoQ6CktLEVvEnpD0N5CkX7zJqnPGaFtoQDy9EkHQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "hosted-git-info": "^9.0.0", - "proc-log": "^5.0.0", - "semver": "^7.3.5", - "validate-npm-package-name": "^6.0.0" - }, - "engines": { - "node": "^20.17.0 || >=22.9.0" - } - }, - "node_modules/npm-packlist": { - "version": "10.0.3", - "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-10.0.3.tgz", - "integrity": "sha512-zPukTwJMOu5X5uvm0fztwS5Zxyvmk38H/LfidkOMt3gbZVCyro2cD/ETzwzVPcWZA3JOyPznfUN/nkyFiyUbxg==", - "dev": true, - "license": "ISC", - "dependencies": { - "ignore-walk": "^8.0.0", - "proc-log": "^6.0.0" - }, - "engines": { - "node": "^20.17.0 || >=22.9.0" - } - }, - "node_modules/npm-packlist/node_modules/proc-log": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-6.0.0.tgz", - "integrity": 
"sha512-KG/XsTDN901PNfPfAMmj6N/Ywg9tM+bHK8pAz+27fS4N4Pcr+4zoYBOcGSBu6ceXYNPxkLpa4ohtfxV1XcLAfA==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^20.17.0 || >=22.9.0" - } - }, - "node_modules/npm-pick-manifest": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-10.0.0.tgz", - "integrity": "sha512-r4fFa4FqYY8xaM7fHecQ9Z2nE9hgNfJR+EmoKv0+chvzWkBcORX3r0FpTByP+CbOVJDladMXnPQGVN8PBLGuTQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "npm-install-checks": "^7.1.0", - "npm-normalize-package-bin": "^4.0.0", - "npm-package-arg": "^12.0.0", - "semver": "^7.3.5" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm-pick-manifest/node_modules/hosted-git-info": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz", - "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==", - "dev": true, - "license": "ISC", - "dependencies": { - "lru-cache": "^10.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm-pick-manifest/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/npm-pick-manifest/node_modules/npm-package-arg": { - "version": "12.0.2", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-12.0.2.tgz", - "integrity": "sha512-f1NpFjNI9O4VbKMOlA5QoBq/vSQPORHcTZ2feJpFkTHJ9eQkdlmZEKSjcAhxTGInC7RlEyScT9ui67NaOsjFWA==", - "dev": true, - "license": "ISC", - "dependencies": { - "hosted-git-info": "^8.0.0", - "proc-log": "^5.0.0", - "semver": "^7.3.5", - "validate-npm-package-name": "^6.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - 
"node_modules/npm-registry-fetch": { - "version": "18.0.2", - "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-18.0.2.tgz", - "integrity": "sha512-LeVMZBBVy+oQb5R6FDV9OlJCcWDU+al10oKpe+nsvcHnG24Z3uM3SvJYKfGJlfGjVU8v9liejCrUR/M5HO5NEQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/redact": "^3.0.0", - "jsonparse": "^1.3.1", - "make-fetch-happen": "^14.0.0", - "minipass": "^7.0.2", - "minipass-fetch": "^4.0.0", - "minizlib": "^3.0.1", - "npm-package-arg": "^12.0.0", - "proc-log": "^5.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm-registry-fetch/node_modules/hosted-git-info": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz", - "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==", - "dev": true, - "license": "ISC", - "dependencies": { - "lru-cache": "^10.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm-registry-fetch/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/npm-registry-fetch/node_modules/npm-package-arg": { - "version": "12.0.2", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-12.0.2.tgz", - "integrity": "sha512-f1NpFjNI9O4VbKMOlA5QoBq/vSQPORHcTZ2feJpFkTHJ9eQkdlmZEKSjcAhxTGInC7RlEyScT9ui67NaOsjFWA==", - "dev": true, - "license": "ISC", - "dependencies": { - "hosted-git-info": "^8.0.0", - "proc-log": "^5.0.0", - "semver": "^7.3.5", - "validate-npm-package-name": "^6.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/nth-check": { - "version": "2.1.1", - "resolved": 
"https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", - "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "boolbase": "^1.0.0" - }, - "funding": { - "url": "https://github.com/fb55/nth-check?sponsor=1" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-inspect": { - "version": "1.13.4", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", - "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/on-finished": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", - "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", - "dev": true, - "license": "MIT", - "dependencies": { - "ee-first": "1.1.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dev": true, - "license": "ISC", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/onetime": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz", - "integrity": "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==", 
- "dev": true, - "license": "MIT", - "dependencies": { - "mimic-function": "^5.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ora": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/ora/-/ora-8.2.0.tgz", - "integrity": "sha512-weP+BZ8MVNnlCm8c0Qdc1WSWq4Qn7I+9CJGm7Qali6g44e/PUzbjNqJX5NJ9ljlNMosfJvg1fKEGILklK9cwnw==", - "dev": true, - "license": "MIT", - "dependencies": { - "chalk": "^5.3.0", - "cli-cursor": "^5.0.0", - "cli-spinners": "^2.9.2", - "is-interactive": "^2.0.0", - "is-unicode-supported": "^2.0.0", - "log-symbols": "^6.0.0", - "stdin-discarder": "^0.2.2", - "string-width": "^7.2.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ordered-binary": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/ordered-binary/-/ordered-binary-1.6.0.tgz", - "integrity": "sha512-IQh2aMfMIDbPjI/8a3Edr+PiOpcsB7yo8NdW7aHWVaoR/pcDldunMvnnwbk/auPGqmKeAdxtZl7MHX/QmPwhvQ==", - "dev": true, - "license": "MIT", - "optional": true - }, - "node_modules/p-map": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-7.0.3.tgz", - "integrity": "sha512-VkndIv2fIB99swvQoA65bm+fsmt6UNdGeIB0oxBs+WhAhdh08QA04JXpI7rbB9r08/nkbysKoya9rtDERYOYMA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/package-json-from-dist": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", - "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", - "dev": true, - "license": "BlueOak-1.0.0" - }, - "node_modules/pacote": { - "version": "21.0.0", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-21.0.0.tgz", - 
"integrity": "sha512-lcqexq73AMv6QNLo7SOpz0JJoaGdS3rBFgF122NZVl1bApo2mfu+XzUBU/X/XsiJu+iUmKpekRayqQYAs+PhkA==", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/git": "^6.0.0", - "@npmcli/installed-package-contents": "^3.0.0", - "@npmcli/package-json": "^6.0.0", - "@npmcli/promise-spawn": "^8.0.0", - "@npmcli/run-script": "^9.0.0", - "cacache": "^19.0.0", - "fs-minipass": "^3.0.0", - "minipass": "^7.0.2", - "npm-package-arg": "^12.0.0", - "npm-packlist": "^10.0.0", - "npm-pick-manifest": "^10.0.0", - "npm-registry-fetch": "^18.0.0", - "proc-log": "^5.0.0", - "promise-retry": "^2.0.1", - "sigstore": "^3.0.0", - "ssri": "^12.0.0", - "tar": "^6.1.11" - }, - "bin": { - "pacote": "bin/index.js" - }, - "engines": { - "node": "^20.17.0 || >=22.9.0" - } - }, - "node_modules/pacote/node_modules/hosted-git-info": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz", - "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==", - "dev": true, - "license": "ISC", - "dependencies": { - "lru-cache": "^10.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/pacote/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/pacote/node_modules/npm-package-arg": { - "version": "12.0.2", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-12.0.2.tgz", - "integrity": "sha512-f1NpFjNI9O4VbKMOlA5QoBq/vSQPORHcTZ2feJpFkTHJ9eQkdlmZEKSjcAhxTGInC7RlEyScT9ui67NaOsjFWA==", - "dev": true, - "license": "ISC", - "dependencies": { - "hosted-git-info": "^8.0.0", - "proc-log": "^5.0.0", - "semver": "^7.3.5", - "validate-npm-package-name": "^6.0.0" - }, - "engines": { - "node": "^18.17.0 || 
>=20.5.0" - } - }, - "node_modules/parse5": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-8.0.0.tgz", - "integrity": "sha512-9m4m5GSgXjL4AjumKzq1Fgfp3Z8rsvjRNbnkVwfu2ImRqE5D0LnY2QfDen18FSY9C573YU5XxSapdHZTZ2WolA==", - "dev": true, - "license": "MIT", - "dependencies": { - "entities": "^6.0.0" - }, - "funding": { - "url": "https://github.com/inikulin/parse5?sponsor=1" - } - }, - "node_modules/parse5-html-rewriting-stream": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/parse5-html-rewriting-stream/-/parse5-html-rewriting-stream-8.0.0.tgz", - "integrity": "sha512-wzh11mj8KKkno1pZEu+l2EVeWsuKDfR5KNWZOTsslfUX8lPDZx77m9T0kIoAVkFtD1nx6YF8oh4BnPHvxMtNMw==", - "dev": true, - "license": "MIT", - "dependencies": { - "entities": "^6.0.0", - "parse5": "^8.0.0", - "parse5-sax-parser": "^8.0.0" - }, - "funding": { - "url": "https://github.com/inikulin/parse5?sponsor=1" - } - }, - "node_modules/parse5-html-rewriting-stream/node_modules/entities": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz", - "integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/parse5-sax-parser": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/parse5-sax-parser/-/parse5-sax-parser-8.0.0.tgz", - "integrity": "sha512-/dQ8UzHZwnrzs3EvDj6IkKrD/jIZyTlB+8XrHJvcjNgRdmWruNdN9i9RK/JtxakmlUdPwKubKPTCqvbTgzGhrw==", - "dev": true, - "license": "MIT", - "dependencies": { - "parse5": "^8.0.0" - }, - "funding": { - "url": "https://github.com/inikulin/parse5?sponsor=1" - } - }, - "node_modules/parse5/node_modules/entities": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz", - "integrity": 
"sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/parseurl": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", - "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", - "dev": true, - "license": "MIT" - }, - "node_modules/path-scurry": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", - "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, - "funding": { - "url": 
"https://github.com/sponsors/isaacs" - } - }, - "node_modules/path-scurry/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/path-to-regexp": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.3.0.tgz", - "integrity": "sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA==", - "dev": true, - "license": "MIT", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/express" - } - }, - "node_modules/picocolors": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", - "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", - "dev": true, - "license": "ISC" - }, - "node_modules/picomatch": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/piscina": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/piscina/-/piscina-5.1.3.tgz", - "integrity": "sha512-0u3N7H4+hbr40KjuVn2uNhOcthu/9usKhnw5vT3J7ply79v3D3M8naI00el9Klcy16x557VsEkkUQaHCWFXC/g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=20.x" - }, - "optionalDependencies": { - "@napi-rs/nice": "^1.0.4" - } - }, - "node_modules/pkce-challenge": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz", - "integrity": 
"sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=16.20.0" - } - }, - "node_modules/postcss": { - "version": "8.5.6", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", - "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "nanoid": "^3.3.11", - "picocolors": "^1.1.1", - "source-map-js": "^1.2.1" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, - "node_modules/postcss-media-query-parser": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/postcss-media-query-parser/-/postcss-media-query-parser-0.2.3.tgz", - "integrity": "sha512-3sOlxmbKcSHMjlUXQZKQ06jOswE7oVkXPxmZdoB1r5l0q6gTFTQSHxNxOrCccElbW7dxNytifNEo8qidX2Vsig==", - "dev": true, - "license": "MIT" - }, - "node_modules/proc-log": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-5.0.0.tgz", - "integrity": "sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/promise-retry": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", - "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", - "dev": true, - "license": "MIT", - "dependencies": { - "err-code": "^2.0.2", - "retry": "^0.12.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/proxy-addr": { - "version": "2.0.7", - 
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", - "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", - "dev": true, - "license": "MIT", - "dependencies": { - "forwarded": "0.2.0", - "ipaddr.js": "1.9.1" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/qjobs": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/qjobs/-/qjobs-1.2.0.tgz", - "integrity": "sha512-8YOJEHtxpySA3fFDyCRxA+UUV+fA+rTWnuWvylOK/NCjhY+b4ocCtmu8TtsWb+mYeU+GCHf/S66KZF/AsteKHg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.9" - } - }, - "node_modules/qs": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", - "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "side-channel": "^1.1.0" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/range-parser": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", - "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/raw-body": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.1.tgz", - "integrity": "sha512-9G8cA+tuMS75+6G/TzW8OtLzmBDMo8p1JRxN5AZ+LAp8uxGA8V8GZm4GQ4/N5QNQEnLmg6SS7wyuSmbKepiKqA==", - "dev": true, - "license": "MIT", - "dependencies": { - "bytes": "3.1.2", - 
"http-errors": "2.0.0", - "iconv-lite": "0.7.0", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/readdirp": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", - "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 14.18.0" - }, - "funding": { - "type": "individual", - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/reflect-metadata": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.2.2.tgz", - "integrity": "sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q==", - "dev": true, - "license": "Apache-2.0" - }, - "node_modules/require-directory": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/require-from-string": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", - "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/requires-port": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", - "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/resolve": { - "version": "1.22.10", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", - "integrity": 
"sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-core-module": "^2.16.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/restore-cursor": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz", - "integrity": "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==", - "dev": true, - "license": "MIT", - "dependencies": { - "onetime": "^7.0.0", - "signal-exit": "^4.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/retry": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", - "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/rfdc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", - "integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==", - "dev": true, - "license": "MIT" - }, - "node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "deprecated": "Rimraf versions prior to v4 are no longer supported", - "dev": true, - "license": "ISC", - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/rollup": { - "version": 
"4.52.3", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.52.3.tgz", - "integrity": "sha512-RIDh866U8agLgiIcdpB+COKnlCreHJLfIhWC3LVflku5YHfpnsIKigRZeFfMfCc4dVcqNVfQQ5gO/afOck064A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/estree": "1.0.8" - }, - "bin": { - "rollup": "dist/bin/rollup" - }, - "engines": { - "node": ">=18.0.0", - "npm": ">=8.0.0" - }, - "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.52.3", - "@rollup/rollup-android-arm64": "4.52.3", - "@rollup/rollup-darwin-arm64": "4.52.3", - "@rollup/rollup-darwin-x64": "4.52.3", - "@rollup/rollup-freebsd-arm64": "4.52.3", - "@rollup/rollup-freebsd-x64": "4.52.3", - "@rollup/rollup-linux-arm-gnueabihf": "4.52.3", - "@rollup/rollup-linux-arm-musleabihf": "4.52.3", - "@rollup/rollup-linux-arm64-gnu": "4.52.3", - "@rollup/rollup-linux-arm64-musl": "4.52.3", - "@rollup/rollup-linux-loong64-gnu": "4.52.3", - "@rollup/rollup-linux-ppc64-gnu": "4.52.3", - "@rollup/rollup-linux-riscv64-gnu": "4.52.3", - "@rollup/rollup-linux-riscv64-musl": "4.52.3", - "@rollup/rollup-linux-s390x-gnu": "4.52.3", - "@rollup/rollup-linux-x64-gnu": "4.52.3", - "@rollup/rollup-linux-x64-musl": "4.52.3", - "@rollup/rollup-openharmony-arm64": "4.52.3", - "@rollup/rollup-win32-arm64-msvc": "4.52.3", - "@rollup/rollup-win32-ia32-msvc": "4.52.3", - "@rollup/rollup-win32-x64-gnu": "4.52.3", - "@rollup/rollup-win32-x64-msvc": "4.52.3", - "fsevents": "~2.3.2" - } - }, - "node_modules/router": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", - "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^4.4.0", - "depd": "^2.0.0", - "is-promise": "^4.0.0", - "parseurl": "^1.3.3", - "path-to-regexp": "^8.0.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/rxjs": { - "version": "7.8.2", - "resolved": 
"https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz", - "integrity": "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==", - "license": "Apache-2.0", - "peer": true, - "dependencies": { - "tslib": "^2.1.0" - } - }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/safe-regex-test": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", - "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "is-regex": "^1.2.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "dev": true, - "license": "MIT" - }, - "node_modules/sass": { - "version": "1.90.0", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.90.0.tgz", - "integrity": "sha512-9GUyuksjw70uNpb1MTYWsH9MQHOHY6kwfnkafC24+7aOMZn9+rVMBxRbLvw756mrBFbIsFg6Xw9IkR2Fnn3k+Q==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "chokidar": "^4.0.0", - "immutable": "^5.0.2", - "source-map-js": ">=0.6.2 <2.0.0" - }, - "bin": { - "sass": "sass.js" - 
}, - "engines": { - "node": ">=14.0.0" - }, - "optionalDependencies": { - "@parcel/watcher": "^2.4.1" - } - }, - "node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/send": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", - "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^4.3.5", - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "etag": "^1.8.1", - "fresh": "^2.0.0", - "http-errors": "^2.0.0", - "mime-types": "^3.0.1", - "ms": "^2.1.3", - "on-finished": "^2.4.1", - "range-parser": "^1.2.1", - "statuses": "^2.0.1" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/serve-static": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", - "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "parseurl": "^1.3.3", - "send": "^1.2.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/setprototypeof": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", - "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", - "dev": true, - "license": "ISC" - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": 
"sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/side-channel": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", - "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", - "dev": true, - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "object-inspect": "^1.13.3", - "side-channel-list": "^1.0.0", - "side-channel-map": "^1.0.1", - "side-channel-weakmap": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-list": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", - "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", - "dev": true, - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "object-inspect": "^1.13.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-map": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", - "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - 
"get-intrinsic": "^1.2.5", - "object-inspect": "^1.13.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-weakmap": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", - "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.5", - "object-inspect": "^1.13.3", - "side-channel-map": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/sigstore": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-3.1.0.tgz", - "integrity": "sha512-ZpzWAFHIFqyFE56dXqgX/DkDRZdz+rRcjoIk/RQU4IX0wiCv1l8S7ZrXDHcCc+uaf+6o7w3h2l3g6GYG5TKN9Q==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@sigstore/bundle": "^3.1.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.4.0", - "@sigstore/sign": "^3.1.0", - "@sigstore/tuf": "^3.1.0", - "@sigstore/verify": "^2.1.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/slice-ansi": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-5.0.0.tgz", - "integrity": "sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==", - "dev": true, - "license": "MIT", - "dependencies": 
{ - "ansi-styles": "^6.0.0", - "is-fullwidth-code-point": "^4.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/slice-ansi?sponsor=1" - } - }, - "node_modules/smart-buffer": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", - "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 6.0.0", - "npm": ">= 3.0.0" - } - }, - "node_modules/socket.io": { - "version": "4.8.1", - "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.8.1.tgz", - "integrity": "sha512-oZ7iUCxph8WYRHHcjBEc9unw3adt5CmSNlppj/5Q4k2RIrhl8Z5yY2Xr4j9zj0+wzVZ0bxmYoGSzKJnRl6A4yg==", - "dev": true, - "license": "MIT", - "dependencies": { - "accepts": "~1.3.4", - "base64id": "~2.0.0", - "cors": "~2.8.5", - "debug": "~4.3.2", - "engine.io": "~6.6.0", - "socket.io-adapter": "~2.5.2", - "socket.io-parser": "~4.2.4" - }, - "engines": { - "node": ">=10.2.0" - } - }, - "node_modules/socket.io-adapter": { - "version": "2.5.5", - "resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.5.5.tgz", - "integrity": "sha512-eLDQas5dzPgOWCk9GuuJC2lBqItuhKI4uxGgo9aIV7MYbk2h9Q6uULEh8WBzThoI7l+qU9Ast9fVUmkqPP9wYg==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "~4.3.4", - "ws": "~8.17.1" - } - }, - "node_modules/socket.io-adapter/node_modules/debug": { - "version": "4.3.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", - "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/socket.io-parser": { - "version": "4.2.4", - "resolved": 
"https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-4.2.4.tgz", - "integrity": "sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew==", - "dev": true, - "license": "MIT", - "dependencies": { - "@socket.io/component-emitter": "~3.1.0", - "debug": "~4.3.1" - }, - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/socket.io-parser/node_modules/debug": { - "version": "4.3.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", - "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/socket.io/node_modules/accepts": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", - "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", - "dev": true, - "license": "MIT", - "dependencies": { - "mime-types": "~2.1.34", - "negotiator": "0.6.3" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/socket.io/node_modules/debug": { - "version": "4.3.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", - "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/socket.io/node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "dev": true, - "license": "MIT", - "engines": { - "node": 
">= 0.6" - } - }, - "node_modules/socket.io/node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "dev": true, - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/socket.io/node_modules/negotiator": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", - "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/socks": { - "version": "2.8.7", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.7.tgz", - "integrity": "sha512-HLpt+uLy/pxB+bum/9DzAgiKS8CX1EvbWxI4zlmgGCExImLdiad2iCwXT5Z4c9c3Eq8rP2318mPW2c+QbtjK8A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ip-address": "^10.0.1", - "smart-buffer": "^4.2.0" - }, - "engines": { - "node": ">= 10.0.0", - "npm": ">= 3.0.0" - } - }, - "node_modules/socks-proxy-agent": { - "version": "8.0.5", - "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.5.tgz", - "integrity": "sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw==", - "dev": true, - "license": "MIT", - "dependencies": { - "agent-base": "^7.1.2", - "debug": "^4.3.4", - "socks": "^2.8.3" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/source-map": { - "version": "0.7.6", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.6.tgz", - "integrity": "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">= 12" - } - }, - "node_modules/source-map-js": { - "version": "1.2.1", - 
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", - "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/source-map-support": { - "version": "0.5.21", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", - "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", - "dev": true, - "license": "MIT", - "dependencies": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" - } - }, - "node_modules/source-map-support/node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/spdx-correct": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", - "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-exceptions": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", - "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", - "dev": true, - "license": "CC-BY-3.0" - }, - "node_modules/spdx-expression-parse": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", - "integrity": 
"sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-license-ids": { - "version": "3.0.22", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.22.tgz", - "integrity": "sha512-4PRT4nh1EImPbt2jASOKHX7PB7I+e4IWNLvkKFDxNhJlfjbYlleYQh285Z/3mPTHSAK/AvdMmw5BNNuYH8ShgQ==", - "dev": true, - "license": "CC0-1.0" - }, - "node_modules/ssri": { - "version": "12.0.0", - "resolved": "https://registry.npmjs.org/ssri/-/ssri-12.0.0.tgz", - "integrity": "sha512-S7iGNosepx9RadX82oimUkvr0Ct7IjJbEbs4mJcTxst8um95J3sDYU1RBEOvdu6oL1Wek2ODI5i4MAw+dZ6cAQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "minipass": "^7.0.3" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/statuses": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", - "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/stdin-discarder": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/stdin-discarder/-/stdin-discarder-0.2.2.tgz", - "integrity": "sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/streamroller": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/streamroller/-/streamroller-3.1.5.tgz", - "integrity": "sha512-KFxaM7XT+irxvdqSP1LGLgNWbYN7ay5owZ3r/8t77p+EtSUAfUgtl7be3xtqtOmGUl9K9YPO2ca8133RlTjvKw==", - "dev": true, - "license": "MIT", - "dependencies": { - "date-format": "^4.0.14", - "debug": "^4.3.4", - "fs-extra": 
"^8.1.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/string-width": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", - "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^10.3.0", - "get-east-asian-width": "^1.0.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/string-width-cjs": { - "name": "string-width", - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true, - "license": "MIT" - }, - "node_modules/string-width-cjs/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": 
"sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", - "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/strip-ansi-cjs": { - "name": "strip-ansi", - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": 
"sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/supports-preserve-symlinks-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", - "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/tar": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", - "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", - "dev": true, - "license": "ISC", - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/tar/node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "dev": true, - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/tar/node_modules/fs-minipass/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/tar/node_modules/minipass": { - "version": "5.0.0", 
- "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=8" - } - }, - "node_modules/tar/node_modules/minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "dev": true, - "license": "MIT", - "dependencies": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/tar/node_modules/minizlib/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/tar/node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "dev": true, - "license": "MIT", - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/tar/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true, - "license": "ISC" - }, - "node_modules/tinyglobby": { - "version": "0.2.14", - "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.14.tgz", - "integrity": "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==", - "dev": true, - "license": 
"MIT", - "dependencies": { - "fdir": "^6.4.4", - "picomatch": "^4.0.2" - }, - "engines": { - "node": ">=12.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/SuperchupuDev" - } - }, - "node_modules/tmp": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz", - "integrity": "sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=14.14" - } - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/toidentifier": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", - "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.6" - } - }, - "node_modules/tslib": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", - "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", - "license": "0BSD", - "peer": true - }, - "node_modules/tuf-js": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-3.1.0.tgz", - "integrity": "sha512-3T3T04WzowbwV2FDiGXBbr81t64g1MUGGJRgT4x5o97N+8ArdhVCAF9IxFrxuSJmM3E5Asn7nKHkao0ibcZXAg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@tufjs/models": "3.0.1", - "debug": "^4.4.1", - "make-fetch-happen": "^14.0.3" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/type-is": { - "version": "2.0.1", - "resolved": 
"https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", - "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", - "dev": true, - "license": "MIT", - "dependencies": { - "content-type": "^1.0.5", - "media-typer": "^1.1.0", - "mime-types": "^3.0.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/typescript": { - "version": "5.9.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", - "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", - "dev": true, - "license": "Apache-2.0", - "peer": true, - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" - } - }, - "node_modules/ua-parser-js": { - "version": "0.7.41", - "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.41.tgz", - "integrity": "sha512-O3oYyCMPYgNNHuO7Jjk3uacJWZF8loBgwrfd/5LE/HyZ3lUIOdniQ7DNXJcIgZbwioZxk0fLfI4EVnetdiX5jg==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/ua-parser-js" - }, - { - "type": "paypal", - "url": "https://paypal.me/faisalman" - }, - { - "type": "github", - "url": "https://github.com/sponsors/faisalman" - } - ], - "license": "MIT", - "bin": { - "ua-parser-js": "script/cli.js" - }, - "engines": { - "node": "*" - } - }, - "node_modules/undici-types": { - "version": "7.16.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", - "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", - "dev": true, - "license": "MIT" - }, - "node_modules/unique-filename": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-4.0.0.tgz", - "integrity": "sha512-XSnEewXmQ+veP7xX2dS5Q4yZAvO40cBN2MWkJ7D/6sW4Dg6wYBNwM1Vrnz1FhH5AdeLIlUXRI9e28z1YZi71NQ==", - "dev": true, - "license": "ISC", - 
"dependencies": { - "unique-slug": "^5.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/unique-slug": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-5.0.0.tgz", - "integrity": "sha512-9OdaqO5kwqR+1kVgHAhsp5vPNU0hnxRa26rBFNfNgM7M6pNtgzeBn3s/xbyCQL3dcjzOatcef6UUHpB/6MaETg==", - "dev": true, - "license": "ISC", - "dependencies": { - "imurmurhash": "^0.1.4" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/universalify": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", - "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4.0.0" - } - }, - "node_modules/unpipe": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", - "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/update-browserslist-db": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.4.tgz", - "integrity": "sha512-q0SPT4xyU84saUX+tomz1WLkxUbuaJnR1xWt17M7fJtEJigJeWUNGUqrauFXsHnqev9y9JTRGwk13tFBuKby4A==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "escalade": "^3.2.0", - "picocolors": "^1.1.1" - }, - "bin": { - "update-browserslist-db": "cli.js" - }, - "peerDependencies": { - "browserslist": ">= 4.21.0" - } - }, - "node_modules/uri-js": { - "version": "4.4.1", - "resolved": 
"https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/uri-js/node_modules/punycode": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", - "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/utils-merge": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/validate-npm-package-license": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } - }, - "node_modules/validate-npm-package-name": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-6.0.2.tgz", - "integrity": "sha512-IUoow1YUtvoBBC06dXs8bR8B9vuA3aJfmQNKMoaPG/OFsPmoQvw8xh+6Ye25Gx9DQhoEom3Pcu9MKHerm/NpUQ==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/vary": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", - "dev": true, - "license": 
"MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/vite": { - "version": "7.1.11", - "resolved": "https://registry.npmjs.org/vite/-/vite-7.1.11.tgz", - "integrity": "sha512-uzcxnSDVjAopEUjljkWh8EIrg6tlzrjFUfMcR1EVsRDGwf/ccef0qQPRyOrROwhrTDaApueq+ja+KLPlzR/zdg==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "esbuild": "^0.25.0", - "fdir": "^6.5.0", - "picomatch": "^4.0.3", - "postcss": "^8.5.6", - "rollup": "^4.43.0", - "tinyglobby": "^0.2.15" - }, - "bin": { - "vite": "bin/vite.js" - }, - "engines": { - "node": "^20.19.0 || >=22.12.0" - }, - "funding": { - "url": "https://github.com/vitejs/vite?sponsor=1" - }, - "optionalDependencies": { - "fsevents": "~2.3.3" - }, - "peerDependencies": { - "@types/node": "^20.19.0 || >=22.12.0", - "jiti": ">=1.21.0", - "less": "^4.0.0", - "lightningcss": "^1.21.0", - "sass": "^1.70.0", - "sass-embedded": "^1.70.0", - "stylus": ">=0.54.8", - "sugarss": "^5.0.0", - "terser": "^5.16.0", - "tsx": "^4.8.1", - "yaml": "^2.4.2" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "jiti": { - "optional": true - }, - "less": { - "optional": true - }, - "lightningcss": { - "optional": true - }, - "sass": { - "optional": true - }, - "sass-embedded": { - "optional": true - }, - "stylus": { - "optional": true - }, - "sugarss": { - "optional": true - }, - "terser": { - "optional": true - }, - "tsx": { - "optional": true - }, - "yaml": { - "optional": true - } - } - }, - "node_modules/vite/node_modules/tinyglobby": { - "version": "0.2.15", - "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", - "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "fdir": "^6.5.0", - "picomatch": "^4.0.3" - }, - "engines": { - "node": ">=12.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/SuperchupuDev" - } - }, - "node_modules/void-elements": 
{ - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/void-elements/-/void-elements-2.0.1.tgz", - "integrity": "sha512-qZKX4RnBzH2ugr8Lxa7x+0V6XD9Sb/ouARtiasEQCHB1EVU4NXtmHsDDrx1dO4ne5fc3J6EW05BP1Dl0z0iung==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/watchpack": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.4.tgz", - "integrity": "sha512-c5EGNOiyxxV5qmTtAB7rbiXxi1ooX1pQKMLX/MIabJjRA0SJBQOjKF+KSVfHkr9U1cADPon0mRiVe/riyaiDUA==", - "dev": true, - "license": "MIT", - "dependencies": { - "glob-to-regexp": "^0.4.1", - "graceful-fs": "^4.1.2" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/weak-lru-cache": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/weak-lru-cache/-/weak-lru-cache-1.2.2.tgz", - "integrity": "sha512-DEAoo25RfSYMuTGc9vPJzZcZullwIqRDSI9LOy+fkCJPi6hykCnfKaXTuPBDuXAUcqHXyOgFtHNp/kB2FjYHbw==", - "dev": true, - "license": "MIT", - "optional": true - }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/wrap-ansi": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs": { - "name": "wrap-ansi", - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", 
- "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true, - "license": "MIT" - }, - "node_modules/wrap-ansi-cjs/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/string-width": { - "version": "4.2.3", - "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/wrap-ansi/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true, - "license": "MIT" - }, - "node_modules/wrap-ansi/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - 
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/ws": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", - "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, - "node_modules/y18n": { - "version": "5.0.8", - "resolved": 
"https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", - "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/yallist": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", - "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", - "dev": true, - "license": "ISC" - }, - "node_modules/yargs": { - "version": "18.0.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-18.0.0.tgz", - "integrity": "sha512-4UEqdc2RYGHZc7Doyqkrqiln3p9X2DZVxaGbwhn2pi7MrRagKaOcIKe8L3OxYcbhXLgLFUS3zAYuQjKBQgmuNg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cliui": "^9.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "string-width": "^7.2.0", - "y18n": "^5.0.5", - "yargs-parser": "^22.0.0" - }, - "engines": { - "node": "^20.19.0 || ^22.12.0 || >=23" - } - }, - "node_modules/yargs-parser": { - "version": "22.0.0", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-22.0.0.tgz", - "integrity": "sha512-rwu/ClNdSMpkSrUb+d6BRsSkLUq1fmfsY6TOpYzTwvwkg1/NRG85KBy3kq++A8LKQwX6lsu+aWad+2khvuXrqw==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^20.19.0 || ^22.12.0 || >=23" - } - }, - "node_modules/yoctocolors-cjs": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/yoctocolors-cjs/-/yoctocolors-cjs-2.1.3.tgz", - "integrity": "sha512-U/PBtDf35ff0D8X8D0jfdzHYEPFxAI7jJlxZXwCSez5M3190m+QobIfh+sWDWSHMCWWJN2AWamkegn6vr6YBTw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/zod": { - "version": "3.25.76", - "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz", - "integrity": 
"sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==", - "dev": true, - "license": "MIT", - "peer": true, - "funding": { - "url": "https://github.com/sponsors/colinhacks" - } - }, - "node_modules/zod-to-json-schema": { - "version": "3.24.6", - "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.6.tgz", - "integrity": "sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg==", - "dev": true, - "license": "ISC", - "peerDependencies": { - "zod": "^3.24.1" - } - }, - "node_modules/zone.js": { - "version": "0.15.1", - "resolved": "https://registry.npmjs.org/zone.js/-/zone.js-0.15.1.tgz", - "integrity": "sha512-XE96n56IQpJM7NAoXswY3XRLcWFW83xe0BiAOeMD7K5k5xecOeul3Qcpx6GqEeeHNkW5DWL5zOyTbEfB4eti8w==", - "license": "MIT", - "peer": true - } - } -} diff --git a/pkg/updater/ui/package.json b/pkg/updater/ui/package.json deleted file mode 100644 index da092d4..0000000 --- a/pkg/updater/ui/package.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "name": "core-element-template", - "version": "0.0.0", - "scripts": { - "ng": "ng", - "start": "ng serve", - "build": "ng build", - "watch": "ng build --watch --configuration development", - "test": "ng test" - }, - "prettier": { - "printWidth": 100, - "singleQuote": true, - "overrides": [ - { - "files": "*.html", - "options": { - "parser": "angular" - } - } - ] - }, - "private": true, - "dependencies": { - "@angular/common": "^20.3.0", - "@angular/compiler": "^20.3.0", - "@angular/core": "^20.3.0", - "@angular/elements": "^20.3.10", - "@angular/forms": "^20.3.0", - "@angular/platform-browser": "^20.3.0", - "@angular/router": "^20.3.0", - "rxjs": "~7.8.0", - "tslib": "^2.3.0", - "zone.js": "~0.15.0" - }, - "devDependencies": { - "@angular/build": "^20.3.9", - "@angular/cli": "^20.3.9", - "@angular/compiler-cli": "^20.3.0", - "@types/jasmine": "~5.1.0", - "jasmine-core": "~5.9.0", - "karma": "~6.4.0", - "karma-chrome-launcher": 
"~3.2.0", - "karma-coverage": "~2.2.0", - "karma-jasmine": "~5.1.0", - "karma-jasmine-html-reporter": "~2.1.0", - "typescript": "~5.9.2" - } -} diff --git a/pkg/updater/ui/public/favicon.ico b/pkg/updater/ui/public/favicon.ico deleted file mode 100644 index 57614f9..0000000 Binary files a/pkg/updater/ui/public/favicon.ico and /dev/null differ diff --git a/pkg/updater/ui/src/app/app-module.ts b/pkg/updater/ui/src/app/app-module.ts deleted file mode 100644 index 444aeff..0000000 --- a/pkg/updater/ui/src/app/app-module.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { DoBootstrap, Injector, NgModule, provideBrowserGlobalErrorListeners } from '@angular/core'; -import { BrowserModule } from '@angular/platform-browser'; -import { createCustomElement } from '@angular/elements'; - -import { App } from './app'; - -@NgModule({ - imports: [ - BrowserModule, - App - ], - providers: [ - provideBrowserGlobalErrorListeners() - ] -}) -export class AppModule implements DoBootstrap { - constructor(private injector: Injector) { - const el = createCustomElement(App, { injector }); - customElements.define('core-element-template', el); - } - - ngDoBootstrap() {} -} diff --git a/pkg/updater/ui/src/app/app.html b/pkg/updater/ui/src/app/app.html deleted file mode 100644 index ef43ee8..0000000 --- a/pkg/updater/ui/src/app/app.html +++ /dev/null @@ -1 +0,0 @@ -

Hello, {{ title() }}

diff --git a/pkg/updater/ui/src/app/app.ts b/pkg/updater/ui/src/app/app.ts deleted file mode 100644 index b5a4c30..0000000 --- a/pkg/updater/ui/src/app/app.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { Component, signal } from '@angular/core'; - -@Component({ - selector: 'core-element-template', - templateUrl: './app.html', - standalone: true -}) -export class App { - protected readonly title = signal('core-element-template'); -} diff --git a/pkg/updater/ui/src/index.html b/pkg/updater/ui/src/index.html deleted file mode 100644 index f2153c9..0000000 --- a/pkg/updater/ui/src/index.html +++ /dev/null @@ -1,13 +0,0 @@ - - - - - CoreElementTemplate - - - - - - - - diff --git a/pkg/updater/ui/src/main.ts b/pkg/updater/ui/src/main.ts deleted file mode 100644 index 40c6c68..0000000 --- a/pkg/updater/ui/src/main.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { platformBrowser } from '@angular/platform-browser'; -import { AppModule } from './app/app-module'; - -platformBrowser().bootstrapModule(AppModule, { - ngZoneEventCoalescing: true, -}) - .catch(err => console.error(err)); diff --git a/pkg/updater/ui/src/styles.css b/pkg/updater/ui/src/styles.css deleted file mode 100644 index 90d4ee0..0000000 --- a/pkg/updater/ui/src/styles.css +++ /dev/null @@ -1 +0,0 @@ -/* You can add global styles to this file, and also import other style files */ diff --git a/pkg/updater/ui/tsconfig.app.json b/pkg/updater/ui/tsconfig.app.json deleted file mode 100644 index 264f459..0000000 --- a/pkg/updater/ui/tsconfig.app.json +++ /dev/null @@ -1,15 +0,0 @@ -/* To learn more about Typescript configuration file: https://www.typescriptlang.org/docs/handbook/tsconfig-json.html. */ -/* To learn more about Angular compiler options: https://angular.dev/reference/configs/angular-compiler-options. 
*/ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "outDir": "./out-tsc/app", - "types": [] - }, - "include": [ - "src/**/*.ts" - ], - "exclude": [ - "src/**/*.spec.ts" - ] -} diff --git a/pkg/updater/ui/tsconfig.json b/pkg/updater/ui/tsconfig.json deleted file mode 100644 index e4955f2..0000000 --- a/pkg/updater/ui/tsconfig.json +++ /dev/null @@ -1,34 +0,0 @@ -/* To learn more about Typescript configuration file: https://www.typescriptlang.org/docs/handbook/tsconfig-json.html. */ -/* To learn more about Angular compiler options: https://angular.dev/reference/configs/angular-compiler-options. */ -{ - "compileOnSave": false, - "compilerOptions": { - "strict": true, - "noImplicitOverride": true, - "noPropertyAccessFromIndexSignature": true, - "noImplicitReturns": true, - "noFallthroughCasesInSwitch": true, - "skipLibCheck": true, - "isolatedModules": true, - "experimentalDecorators": true, - "importHelpers": true, - "target": "ES2022", - "module": "preserve" - }, - "angularCompilerOptions": { - "enableI18nLegacyMessageIdFormat": false, - "strictInjectionParameters": true, - "strictInputAccessModifiers": true, - "typeCheckHostBindings": true, - "strictTemplates": true - }, - "files": [], - "references": [ - { - "path": "./tsconfig.app.json" - }, - { - "path": "./tsconfig.spec.json" - } - ] -} diff --git a/pkg/updater/ui/tsconfig.spec.json b/pkg/updater/ui/tsconfig.spec.json deleted file mode 100644 index 04df34c..0000000 --- a/pkg/updater/ui/tsconfig.spec.json +++ /dev/null @@ -1,14 +0,0 @@ -/* To learn more about Typescript configuration file: https://www.typescriptlang.org/docs/handbook/tsconfig-json.html. */ -/* To learn more about Angular compiler options: https://angular.dev/reference/configs/angular-compiler-options. 
*/ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "outDir": "./out-tsc/spec", - "types": [ - "jasmine" - ] - }, - "include": [ - "src/**/*.ts" - ] -} diff --git a/pkg/updater/updater.go b/pkg/updater/updater.go deleted file mode 100644 index 69929c4..0000000 --- a/pkg/updater/updater.go +++ /dev/null @@ -1,237 +0,0 @@ -package updater - -import ( - "context" - "fmt" - "io" - "net/http" - "strings" - - "github.com/minio/selfupdate" - "golang.org/x/mod/semver" -) - -// Version holds the current version of the application. -// It is set at build time via ldflags or fallback to the version in package.json. -var Version = PkgVersion - -// NewGithubClient is a variable that holds a function to create a new GithubClient. -// This can be replaced in tests to inject a mock client. -// -// Example: -// -// updater.NewGithubClient = func() updater.GithubClient { -// return &mockClient{} // or your mock implementation -// } -var NewGithubClient = func() GithubClient { - return &githubClient{} -} - -// DoUpdate is a variable that holds the function to perform the actual update. -// This can be replaced in tests to prevent actual updates. -var DoUpdate = func(url string) error { - resp, err := http.Get(url) - if err != nil { - return err - } - defer func(Body io.ReadCloser) { - err := Body.Close() - if err != nil { - fmt.Printf("failed to close response body: %v\n", err) - } - }(resp.Body) - - err = selfupdate.Apply(resp.Body, selfupdate.Options{}) - if err != nil { - if rerr := selfupdate.RollbackError(err); rerr != nil { - return fmt.Errorf("failed to rollback from failed update: %v", rerr) - } - return fmt.Errorf("update failed: %v", err) - } - - fmt.Println("Update applied successfully.") - return nil -} - -// CheckForNewerVersion checks if a newer version of the application is available on GitHub. -// It fetches the latest release for the given owner, repository, and channel, and compares its tag -// with the current application version. 
-var CheckForNewerVersion = func(owner, repo, channel string, forceSemVerPrefix bool) (*Release, bool, error) { - client := NewGithubClient() - ctx := context.Background() - - release, err := client.GetLatestRelease(ctx, owner, repo, channel) - if err != nil { - return nil, false, fmt.Errorf("error fetching latest release: %w", err) - } - - if release == nil { - return nil, false, nil // No release found - } - - // Always normalize to 'v' prefix for semver comparison - vCurrent := formatVersionForComparison(Version) - vLatest := formatVersionForComparison(release.TagName) - - if semver.Compare(vCurrent, vLatest) >= 0 { - return release, false, nil // Current version is up-to-date or newer - } - - return release, true, nil // A newer version is available -} - -// CheckForUpdates checks for new updates on GitHub and applies them if a newer version is found. -// It uses the provided owner, repository, and channel to find the latest release. -var CheckForUpdates = func(owner, repo, channel string, forceSemVerPrefix bool, releaseURLFormat string) error { - release, updateAvailable, err := CheckForNewerVersion(owner, repo, channel, forceSemVerPrefix) - if err != nil { - return err - } - - if !updateAvailable { - if release != nil { - fmt.Printf("Current version %s is up-to-date with latest release %s.\n", - formatVersionForDisplay(Version, forceSemVerPrefix), - formatVersionForDisplay(release.TagName, forceSemVerPrefix)) - } else { - fmt.Println("No releases found.") - } - return nil - } - - fmt.Printf("Newer version %s found (current: %s). Applying update...\n", - formatVersionForDisplay(release.TagName, forceSemVerPrefix), - formatVersionForDisplay(Version, forceSemVerPrefix)) - - downloadURL, err := GetDownloadURL(release, releaseURLFormat) - if err != nil { - return fmt.Errorf("error getting download URL: %w", err) - } - - return DoUpdate(downloadURL) -} - -// CheckOnly checks for new updates on GitHub without applying them. 
-// It prints a message indicating if a new release is available. -var CheckOnly = func(owner, repo, channel string, forceSemVerPrefix bool, releaseURLFormat string) error { - release, updateAvailable, err := CheckForNewerVersion(owner, repo, channel, forceSemVerPrefix) - if err != nil { - return err - } - - if !updateAvailable { - if release != nil { - fmt.Printf("Current version %s is up-to-date with latest release %s.\n", - formatVersionForDisplay(Version, forceSemVerPrefix), - formatVersionForDisplay(release.TagName, forceSemVerPrefix)) - } else { - fmt.Println("No new release found.") - } - return nil - } - - fmt.Printf("New release found: %s (current version: %s)\n", - formatVersionForDisplay(release.TagName, forceSemVerPrefix), - formatVersionForDisplay(Version, forceSemVerPrefix)) - return nil -} - -// CheckForUpdatesByTag checks for and applies updates from GitHub based on the channel -// determined by the current application's version tag (e.g., 'stable' or 'prerelease'). -var CheckForUpdatesByTag = func(owner, repo string) error { - channel := determineChannel(Version, false) // isPreRelease is false for current version - return CheckForUpdates(owner, repo, channel, true, "") -} - -// CheckOnlyByTag checks for updates from GitHub based on the channel determined by the -// current version tag, without applying them. -var CheckOnlyByTag = func(owner, repo string) error { - channel := determineChannel(Version, false) // isPreRelease is false for current version - return CheckOnly(owner, repo, channel, true, "") -} - -// CheckForUpdatesByPullRequest finds a release associated with a specific pull request number -// on GitHub and applies the update. 
-var CheckForUpdatesByPullRequest = func(owner, repo string, prNumber int, releaseURLFormat string) error { - client := NewGithubClient() - ctx := context.Background() - - release, err := client.GetReleaseByPullRequest(ctx, owner, repo, prNumber) - if err != nil { - return fmt.Errorf("error fetching release for pull request: %w", err) - } - - if release == nil { - fmt.Printf("No release found for PR #%d.\n", prNumber) - return nil - } - - fmt.Printf("Release %s found for PR #%d. Applying update...\n", release.TagName, prNumber) - - downloadURL, err := GetDownloadURL(release, releaseURLFormat) - if err != nil { - return fmt.Errorf("error getting download URL: %w", err) - } - - return DoUpdate(downloadURL) -} - -// CheckForUpdatesHTTP checks for and applies updates from a generic HTTP endpoint. -// The endpoint is expected to provide update information in a structured format. -var CheckForUpdatesHTTP = func(baseURL string) error { - info, err := GetLatestUpdateFromURL(baseURL) - if err != nil { - return err - } - - vCurrent := formatVersionForComparison(Version) - vLatest := formatVersionForComparison(info.Version) - - if semver.Compare(vCurrent, vLatest) >= 0 { - fmt.Printf("Current version %s is up-to-date with latest release %s.\n", Version, info.Version) - return nil - } - - fmt.Printf("Newer version %s found (current: %s). Applying update...\n", info.Version, Version) - return DoUpdate(info.URL) -} - -// CheckOnlyHTTP checks for updates from a generic HTTP endpoint without applying them. -// It prints a message if a new version is available. 
-var CheckOnlyHTTP = func(baseURL string) error { - info, err := GetLatestUpdateFromURL(baseURL) - if err != nil { - return err - } - - vCurrent := formatVersionForComparison(Version) - vLatest := formatVersionForComparison(info.Version) - - if semver.Compare(vCurrent, vLatest) >= 0 { - fmt.Printf("Current version %s is up-to-date with latest release %s.\n", Version, info.Version) - return nil - } - - fmt.Printf("New release found: %s (current version: %s)\n", info.Version, Version) - return nil -} - -// formatVersionForComparison ensures the version string has a 'v' prefix for semver comparison. -func formatVersionForComparison(version string) string { - if version != "" && !strings.HasPrefix(version, "v") { - return "v" + version - } - return version -} - -// formatVersionForDisplay ensures the version string has the correct 'v' prefix based on the forceSemVerPrefix flag. -func formatVersionForDisplay(version string, forceSemVerPrefix bool) string { - hasV := strings.HasPrefix(version, "v") - if forceSemVerPrefix && !hasV { - return "v" + version - } - if !forceSemVerPrefix && hasV { - return strings.TrimPrefix(version, "v") - } - return version -} diff --git a/pkg/updater/updater_test.go b/pkg/updater/updater_test.go deleted file mode 100644 index dfb5668..0000000 --- a/pkg/updater/updater_test.go +++ /dev/null @@ -1,261 +0,0 @@ -package updater - -import ( - "context" - "fmt" - "log" - "net/http" - "net/http/httptest" - "runtime" -) - -// mockGithubClient is a mock implementation of the GithubClient interface for testing. 
-type mockGithubClient struct { - getLatestRelease func(ctx context.Context, owner, repo, channel string) (*Release, error) - getReleaseByPR func(ctx context.Context, owner, repo string, prNumber int) (*Release, error) - getPublicRepos func(ctx context.Context, userOrOrg string) ([]string, error) - getLatestReleaseCount int - getReleaseByPRCount int - getPublicReposCount int -} - -func (m *mockGithubClient) GetLatestRelease(ctx context.Context, owner, repo, channel string) (*Release, error) { - m.getLatestReleaseCount++ - return m.getLatestRelease(ctx, owner, repo, channel) -} - -func (m *mockGithubClient) GetReleaseByPullRequest(ctx context.Context, owner, repo string, prNumber int) (*Release, error) { - m.getReleaseByPRCount++ - return m.getReleaseByPR(ctx, owner, repo, prNumber) -} - -func (m *mockGithubClient) GetPublicRepos(ctx context.Context, userOrOrg string) ([]string, error) { - m.getPublicReposCount++ - if m.getPublicRepos != nil { - return m.getPublicRepos(ctx, userOrOrg) - } - return nil, fmt.Errorf("GetPublicRepos not implemented") -} - -func ExampleCheckForNewerVersion() { - originalNewGithubClient := NewGithubClient - defer func() { NewGithubClient = originalNewGithubClient }() - - NewGithubClient = func() GithubClient { - return &mockGithubClient{ - getLatestRelease: func(ctx context.Context, owner, repo, channel string) (*Release, error) { - return &Release{TagName: "v1.1.0"}, nil - }, - } - } - - Version = "1.0.0" - release, available, err := CheckForNewerVersion("owner", "repo", "stable", true) - if err != nil { - log.Fatalf("CheckForNewerVersion failed: %v", err) - } - - if available { - fmt.Printf("Newer version available: %s", release.TagName) - } else { - fmt.Println("No newer version available.") - } - // Output: Newer version available: v1.1.0 -} - -func ExampleCheckForUpdates() { - // Mock the functions to prevent actual updates and network calls - originalDoUpdate := DoUpdate - originalNewGithubClient := NewGithubClient - defer func() { 
- DoUpdate = originalDoUpdate - NewGithubClient = originalNewGithubClient - }() - - NewGithubClient = func() GithubClient { - return &mockGithubClient{ - getLatestRelease: func(ctx context.Context, owner, repo, channel string) (*Release, error) { - return &Release{ - TagName: "v1.1.0", - Assets: []ReleaseAsset{{Name: fmt.Sprintf("test-asset-%s-%s", runtime.GOOS, runtime.GOARCH), DownloadURL: "http://example.com/asset"}}, - }, nil - }, - } - } - - DoUpdate = func(url string) error { - fmt.Printf("Update would be applied from: %s", url) - return nil - } - - Version = "1.0.0" - err := CheckForUpdates("owner", "repo", "stable", true, "") - if err != nil { - log.Fatalf("CheckForUpdates failed: %v", err) - } - // Output: - // Newer version v1.1.0 found (current: v1.0.0). Applying update... - // Update would be applied from: http://example.com/asset -} - -func ExampleCheckOnly() { - originalNewGithubClient := NewGithubClient - defer func() { NewGithubClient = originalNewGithubClient }() - - NewGithubClient = func() GithubClient { - return &mockGithubClient{ - getLatestRelease: func(ctx context.Context, owner, repo, channel string) (*Release, error) { - return &Release{TagName: "v1.1.0"}, nil - }, - } - } - - Version = "1.0.0" - err := CheckOnly("owner", "repo", "stable", true, "") - if err != nil { - log.Fatalf("CheckOnly failed: %v", err) - } - // Output: New release found: v1.1.0 (current version: v1.0.0) -} - -func ExampleCheckForUpdatesByTag() { - // Mock the functions to prevent actual updates and network calls - originalDoUpdate := DoUpdate - originalNewGithubClient := NewGithubClient - defer func() { - DoUpdate = originalDoUpdate - NewGithubClient = originalNewGithubClient - }() - - NewGithubClient = func() GithubClient { - return &mockGithubClient{ - getLatestRelease: func(ctx context.Context, owner, repo, channel string) (*Release, error) { - if channel == "stable" { - return &Release{ - TagName: "v1.1.0", - Assets: []ReleaseAsset{{Name: 
fmt.Sprintf("test-asset-%s-%s", runtime.GOOS, runtime.GOARCH), DownloadURL: "http://example.com/asset"}}, - }, nil - } - return nil, nil - }, - } - } - - DoUpdate = func(url string) error { - fmt.Printf("Update would be applied from: %s", url) - return nil - } - - Version = "1.0.0" // A version that resolves to the "stable" channel - err := CheckForUpdatesByTag("owner", "repo") - if err != nil { - log.Fatalf("CheckForUpdatesByTag failed: %v", err) - } - // Output: - // Newer version v1.1.0 found (current: v1.0.0). Applying update... - // Update would be applied from: http://example.com/asset -} - -func ExampleCheckOnlyByTag() { - originalNewGithubClient := NewGithubClient - defer func() { NewGithubClient = originalNewGithubClient }() - - NewGithubClient = func() GithubClient { - return &mockGithubClient{ - getLatestRelease: func(ctx context.Context, owner, repo, channel string) (*Release, error) { - if channel == "stable" { - return &Release{TagName: "v1.1.0"}, nil - } - return nil, nil - }, - } - } - - Version = "1.0.0" // A version that resolves to the "stable" channel - err := CheckOnlyByTag("owner", "repo") - if err != nil { - log.Fatalf("CheckOnlyByTag failed: %v", err) - } - // Output: New release found: v1.1.0 (current version: v1.0.0) -} - -func ExampleCheckForUpdatesByPullRequest() { - // Mock the functions to prevent actual updates and network calls - originalDoUpdate := DoUpdate - originalNewGithubClient := NewGithubClient - defer func() { - DoUpdate = originalDoUpdate - NewGithubClient = originalNewGithubClient - }() - - NewGithubClient = func() GithubClient { - return &mockGithubClient{ - getReleaseByPR: func(ctx context.Context, owner, repo string, prNumber int) (*Release, error) { - if prNumber == 123 { - return &Release{ - TagName: "v1.1.0-alpha.pr.123", - Assets: []ReleaseAsset{{Name: fmt.Sprintf("test-asset-%s-%s", runtime.GOOS, runtime.GOARCH), DownloadURL: "http://example.com/asset-pr"}}, - }, nil - } - return nil, nil - }, - } - } - - DoUpdate 
= func(url string) error { - fmt.Printf("Update would be applied from: %s", url) - return nil - } - - err := CheckForUpdatesByPullRequest("owner", "repo", 123, "") - if err != nil { - log.Fatalf("CheckForUpdatesByPullRequest failed: %v", err) - } - // Output: - // Release v1.1.0-alpha.pr.123 found for PR #123. Applying update... - // Update would be applied from: http://example.com/asset-pr -} - -func ExampleCheckForUpdatesHTTP() { - // Create a mock HTTP server - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if r.URL.Path == "/latest.json" { - fmt.Fprintln(w, `{"version": "1.1.0", "url": "http://example.com/update"}`) - } - })) - defer server.Close() - - // Mock the doUpdateFunc to prevent actual updates - originalDoUpdate := DoUpdate - defer func() { DoUpdate = originalDoUpdate }() - DoUpdate = func(url string) error { - fmt.Printf("Update would be applied from: %s", url) - return nil - } - - Version = "1.0.0" - err := CheckForUpdatesHTTP(server.URL) - if err != nil { - log.Fatalf("CheckForUpdatesHTTP failed: %v", err) - } - // Output: - // Newer version 1.1.0 found (current: 1.0.0). Applying update... - // Update would be applied from: http://example.com/update -} - -func ExampleCheckOnlyHTTP() { - // Create a mock HTTP server - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if r.URL.Path == "/latest.json" { - fmt.Fprintln(w, `{"version": "1.1.0", "url": "http://example.com/update"}`) - } - })) - defer server.Close() - - Version = "1.0.0" - err := CheckOnlyHTTP(server.URL) - if err != nil { - log.Fatalf("CheckOnlyHTTP failed: %v", err) - } - // Output: New release found: 1.1.0 (current version: 1.0.0) -} diff --git a/pkg/vm/cmd_commands.go b/pkg/vm/cmd_commands.go deleted file mode 100644 index 2631e82..0000000 --- a/pkg/vm/cmd_commands.go +++ /dev/null @@ -1,13 +0,0 @@ -// Package vm provides LinuxKit virtual machine management commands. 
-// -// Commands: -// - run: Run a VM from image (.iso, .qcow2, .vmdk, .raw) or template -// - ps: List running VMs -// - stop: Stop a running VM -// - logs: View VM logs -// - exec: Execute command in VM via SSH -// - templates: Manage LinuxKit templates (list, build) -// -// Uses qemu or hyperkit depending on system availability. -// Templates are built from YAML definitions and can include variables. -package vm diff --git a/pkg/vm/cmd_container.go b/pkg/vm/cmd_container.go deleted file mode 100644 index 73188ce..0000000 --- a/pkg/vm/cmd_container.go +++ /dev/null @@ -1,344 +0,0 @@ -package vm - -import ( - "context" - "errors" - "fmt" - "io" - "os" - "strings" - "text/tabwriter" - "time" - - "github.com/host-uk/core/pkg/container" - "github.com/host-uk/core/pkg/i18n" - "github.com/spf13/cobra" -) - -var ( - runName string - runDetach bool - runMemory int - runCPUs int - runSSHPort int - runTemplateName string - runVarFlags []string -) - -// addVMRunCommand adds the 'run' command under vm. 
-func addVMRunCommand(parent *cobra.Command) { - runCmd := &cobra.Command{ - Use: "run [image]", - Short: i18n.T("cmd.vm.run.short"), - Long: i18n.T("cmd.vm.run.long"), - RunE: func(cmd *cobra.Command, args []string) error { - opts := container.RunOptions{ - Name: runName, - Detach: runDetach, - Memory: runMemory, - CPUs: runCPUs, - SSHPort: runSSHPort, - } - - // If template is specified, build and run from template - if runTemplateName != "" { - vars := ParseVarFlags(runVarFlags) - return RunFromTemplate(runTemplateName, vars, opts) - } - - // Otherwise, require an image path - if len(args) == 0 { - return errors.New(i18n.T("cmd.vm.run.error.image_required")) - } - image := args[0] - - return runContainer(image, runName, runDetach, runMemory, runCPUs, runSSHPort) - }, - } - - runCmd.Flags().StringVar(&runName, "name", "", i18n.T("cmd.vm.run.flag.name")) - runCmd.Flags().BoolVarP(&runDetach, "detach", "d", false, i18n.T("cmd.vm.run.flag.detach")) - runCmd.Flags().IntVar(&runMemory, "memory", 0, i18n.T("cmd.vm.run.flag.memory")) - runCmd.Flags().IntVar(&runCPUs, "cpus", 0, i18n.T("cmd.vm.run.flag.cpus")) - runCmd.Flags().IntVar(&runSSHPort, "ssh-port", 0, i18n.T("cmd.vm.run.flag.ssh_port")) - runCmd.Flags().StringVar(&runTemplateName, "template", "", i18n.T("cmd.vm.run.flag.template")) - runCmd.Flags().StringArrayVar(&runVarFlags, "var", nil, i18n.T("cmd.vm.run.flag.var")) - - parent.AddCommand(runCmd) -} - -func runContainer(image, name string, detach bool, memory, cpus, sshPort int) error { - manager, err := container.NewLinuxKitManager() - if err != nil { - return fmt.Errorf(i18n.T("i18n.fail.init", "container manager")+": %w", err) - } - - opts := container.RunOptions{ - Name: name, - Detach: detach, - Memory: memory, - CPUs: cpus, - SSHPort: sshPort, - } - - fmt.Printf("%s %s\n", dimStyle.Render(i18n.Label("image")), image) - if name != "" { - fmt.Printf("%s %s\n", dimStyle.Render(i18n.T("cmd.vm.label.name")), name) - } - fmt.Printf("%s %s\n", 
dimStyle.Render(i18n.T("cmd.vm.label.hypervisor")), manager.Hypervisor().Name()) - fmt.Println() - - ctx := context.Background() - c, err := manager.Run(ctx, image, opts) - if err != nil { - return fmt.Errorf(i18n.T("i18n.fail.run", "container")+": %w", err) - } - - if detach { - fmt.Printf("%s %s\n", successStyle.Render(i18n.Label("started")), c.ID) - fmt.Printf("%s %d\n", dimStyle.Render(i18n.T("cmd.vm.label.pid")), c.PID) - fmt.Println() - fmt.Println(i18n.T("cmd.vm.hint.view_logs", map[string]interface{}{"ID": c.ID[:8]})) - fmt.Println(i18n.T("cmd.vm.hint.stop", map[string]interface{}{"ID": c.ID[:8]})) - } else { - fmt.Printf("\n%s %s\n", dimStyle.Render(i18n.T("cmd.vm.label.container_stopped")), c.ID) - } - - return nil -} - -var psAll bool - -// addVMPsCommand adds the 'ps' command under vm. -func addVMPsCommand(parent *cobra.Command) { - psCmd := &cobra.Command{ - Use: "ps", - Short: i18n.T("cmd.vm.ps.short"), - Long: i18n.T("cmd.vm.ps.long"), - RunE: func(cmd *cobra.Command, args []string) error { - return listContainers(psAll) - }, - } - - psCmd.Flags().BoolVarP(&psAll, "all", "a", false, i18n.T("cmd.vm.ps.flag.all")) - - parent.AddCommand(psCmd) -} - -func listContainers(all bool) error { - manager, err := container.NewLinuxKitManager() - if err != nil { - return fmt.Errorf(i18n.T("i18n.fail.init", "container manager")+": %w", err) - } - - ctx := context.Background() - containers, err := manager.List(ctx) - if err != nil { - return fmt.Errorf(i18n.T("i18n.fail.list", "containers")+": %w", err) - } - - // Filter if not showing all - if !all { - filtered := make([]*container.Container, 0) - for _, c := range containers { - if c.Status == container.StatusRunning { - filtered = append(filtered, c) - } - } - containers = filtered - } - - if len(containers) == 0 { - if all { - fmt.Println(i18n.T("cmd.vm.ps.no_containers")) - } else { - fmt.Println(i18n.T("cmd.vm.ps.no_running")) - } - return nil - } - - w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0) - 
fmt.Fprintln(w, i18n.T("cmd.vm.ps.header")) - fmt.Fprintln(w, "--\t----\t-----\t------\t-------\t---") - - for _, c := range containers { - // Shorten image path - imageName := c.Image - if len(imageName) > 30 { - imageName = "..." + imageName[len(imageName)-27:] - } - - // Format duration - duration := formatDuration(time.Since(c.StartedAt)) - - // Status with color - status := string(c.Status) - switch c.Status { - case container.StatusRunning: - status = successStyle.Render(status) - case container.StatusStopped: - status = dimStyle.Render(status) - case container.StatusError: - status = errorStyle.Render(status) - } - - fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s\t%d\n", - c.ID[:8], c.Name, imageName, status, duration, c.PID) - } - - w.Flush() - return nil -} - -func formatDuration(d time.Duration) string { - if d < time.Minute { - return fmt.Sprintf("%ds", int(d.Seconds())) - } - if d < time.Hour { - return fmt.Sprintf("%dm", int(d.Minutes())) - } - if d < 24*time.Hour { - return fmt.Sprintf("%dh", int(d.Hours())) - } - return fmt.Sprintf("%dd", int(d.Hours()/24)) -} - -// addVMStopCommand adds the 'stop' command under vm. 
-func addVMStopCommand(parent *cobra.Command) { - stopCmd := &cobra.Command{ - Use: "stop ", - Short: i18n.T("cmd.vm.stop.short"), - Long: i18n.T("cmd.vm.stop.long"), - RunE: func(cmd *cobra.Command, args []string) error { - if len(args) == 0 { - return errors.New(i18n.T("cmd.vm.error.id_required")) - } - return stopContainer(args[0]) - }, - } - - parent.AddCommand(stopCmd) -} - -func stopContainer(id string) error { - manager, err := container.NewLinuxKitManager() - if err != nil { - return fmt.Errorf(i18n.T("i18n.fail.init", "container manager")+": %w", err) - } - - // Support partial ID matching - fullID, err := resolveContainerID(manager, id) - if err != nil { - return err - } - - fmt.Printf("%s %s\n", dimStyle.Render(i18n.T("cmd.vm.stop.stopping")), fullID[:8]) - - ctx := context.Background() - if err := manager.Stop(ctx, fullID); err != nil { - return fmt.Errorf(i18n.T("i18n.fail.stop", "container")+": %w", err) - } - - fmt.Printf("%s\n", successStyle.Render(i18n.T("common.status.stopped"))) - return nil -} - -// resolveContainerID resolves a partial ID to a full ID. -func resolveContainerID(manager *container.LinuxKitManager, partialID string) (string, error) { - ctx := context.Background() - containers, err := manager.List(ctx) - if err != nil { - return "", err - } - - var matches []*container.Container - for _, c := range containers { - if strings.HasPrefix(c.ID, partialID) || strings.HasPrefix(c.Name, partialID) { - matches = append(matches, c) - } - } - - switch len(matches) { - case 0: - return "", errors.New(i18n.T("cmd.vm.error.no_match", map[string]interface{}{"ID": partialID})) - case 1: - return matches[0].ID, nil - default: - return "", errors.New(i18n.T("cmd.vm.error.multiple_match", map[string]interface{}{"ID": partialID})) - } -} - -var logsFollow bool - -// addVMLogsCommand adds the 'logs' command under vm. 
-func addVMLogsCommand(parent *cobra.Command) { - logsCmd := &cobra.Command{ - Use: "logs ", - Short: i18n.T("cmd.vm.logs.short"), - Long: i18n.T("cmd.vm.logs.long"), - RunE: func(cmd *cobra.Command, args []string) error { - if len(args) == 0 { - return errors.New(i18n.T("cmd.vm.error.id_required")) - } - return viewLogs(args[0], logsFollow) - }, - } - - logsCmd.Flags().BoolVarP(&logsFollow, "follow", "f", false, i18n.T("common.flag.follow")) - - parent.AddCommand(logsCmd) -} - -func viewLogs(id string, follow bool) error { - manager, err := container.NewLinuxKitManager() - if err != nil { - return fmt.Errorf(i18n.T("i18n.fail.init", "container manager")+": %w", err) - } - - fullID, err := resolveContainerID(manager, id) - if err != nil { - return err - } - - ctx := context.Background() - reader, err := manager.Logs(ctx, fullID, follow) - if err != nil { - return fmt.Errorf(i18n.T("i18n.fail.get", "logs")+": %w", err) - } - defer reader.Close() - - _, err = io.Copy(os.Stdout, reader) - return err -} - -// addVMExecCommand adds the 'exec' command under vm. 
-func addVMExecCommand(parent *cobra.Command) { - execCmd := &cobra.Command{ - Use: "exec [args...]", - Short: i18n.T("cmd.vm.exec.short"), - Long: i18n.T("cmd.vm.exec.long"), - RunE: func(cmd *cobra.Command, args []string) error { - if len(args) < 2 { - return errors.New(i18n.T("cmd.vm.error.id_and_cmd_required")) - } - return execInContainer(args[0], args[1:]) - }, - } - - parent.AddCommand(execCmd) -} - -func execInContainer(id string, cmd []string) error { - manager, err := container.NewLinuxKitManager() - if err != nil { - return fmt.Errorf(i18n.T("i18n.fail.init", "container manager")+": %w", err) - } - - fullID, err := resolveContainerID(manager, id) - if err != nil { - return err - } - - ctx := context.Background() - return manager.Exec(ctx, fullID, cmd) -} diff --git a/pkg/vm/cmd_templates.go b/pkg/vm/cmd_templates.go deleted file mode 100644 index 040939f..0000000 --- a/pkg/vm/cmd_templates.go +++ /dev/null @@ -1,310 +0,0 @@ -package vm - -import ( - "context" - "errors" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - "text/tabwriter" - - "github.com/host-uk/core/pkg/container" - "github.com/host-uk/core/pkg/i18n" - "github.com/spf13/cobra" -) - -// addVMTemplatesCommand adds the 'templates' command under vm. -func addVMTemplatesCommand(parent *cobra.Command) { - templatesCmd := &cobra.Command{ - Use: "templates", - Short: i18n.T("cmd.vm.templates.short"), - Long: i18n.T("cmd.vm.templates.long"), - RunE: func(cmd *cobra.Command, args []string) error { - return listTemplates() - }, - } - - // Add subcommands - addTemplatesShowCommand(templatesCmd) - addTemplatesVarsCommand(templatesCmd) - - parent.AddCommand(templatesCmd) -} - -// addTemplatesShowCommand adds the 'templates show' subcommand. 
-func addTemplatesShowCommand(parent *cobra.Command) { - showCmd := &cobra.Command{ - Use: "show ", - Short: i18n.T("cmd.vm.templates.show.short"), - Long: i18n.T("cmd.vm.templates.show.long"), - RunE: func(cmd *cobra.Command, args []string) error { - if len(args) == 0 { - return errors.New(i18n.T("cmd.vm.error.template_required")) - } - return showTemplate(args[0]) - }, - } - - parent.AddCommand(showCmd) -} - -// addTemplatesVarsCommand adds the 'templates vars' subcommand. -func addTemplatesVarsCommand(parent *cobra.Command) { - varsCmd := &cobra.Command{ - Use: "vars ", - Short: i18n.T("cmd.vm.templates.vars.short"), - Long: i18n.T("cmd.vm.templates.vars.long"), - RunE: func(cmd *cobra.Command, args []string) error { - if len(args) == 0 { - return errors.New(i18n.T("cmd.vm.error.template_required")) - } - return showTemplateVars(args[0]) - }, - } - - parent.AddCommand(varsCmd) -} - -func listTemplates() error { - templates := container.ListTemplates() - - if len(templates) == 0 { - fmt.Println(i18n.T("cmd.vm.templates.no_templates")) - return nil - } - - fmt.Printf("%s\n\n", repoNameStyle.Render(i18n.T("cmd.vm.templates.title"))) - - w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0) - fmt.Fprintln(w, i18n.T("cmd.vm.templates.header")) - fmt.Fprintln(w, "----\t-----------") - - for _, tmpl := range templates { - desc := tmpl.Description - if len(desc) > 60 { - desc = desc[:57] + "..." 
- } - fmt.Fprintf(w, "%s\t%s\n", repoNameStyle.Render(tmpl.Name), desc) - } - w.Flush() - - fmt.Println() - fmt.Printf("%s %s\n", i18n.T("cmd.vm.templates.hint.show"), dimStyle.Render("core vm templates show ")) - fmt.Printf("%s %s\n", i18n.T("cmd.vm.templates.hint.vars"), dimStyle.Render("core vm templates vars ")) - fmt.Printf("%s %s\n", i18n.T("cmd.vm.templates.hint.run"), dimStyle.Render("core vm run --template --var SSH_KEY=\"...\"")) - - return nil -} - -func showTemplate(name string) error { - content, err := container.GetTemplate(name) - if err != nil { - return err - } - - fmt.Printf("%s %s\n\n", dimStyle.Render(i18n.T("common.label.template")), repoNameStyle.Render(name)) - fmt.Println(content) - - return nil -} - -func showTemplateVars(name string) error { - content, err := container.GetTemplate(name) - if err != nil { - return err - } - - required, optional := container.ExtractVariables(content) - - fmt.Printf("%s %s\n\n", dimStyle.Render(i18n.T("common.label.template")), repoNameStyle.Render(name)) - - if len(required) > 0 { - fmt.Printf("%s\n", errorStyle.Render(i18n.T("cmd.vm.templates.vars.required"))) - for _, v := range required { - fmt.Printf(" %s\n", varStyle.Render("${"+v+"}")) - } - fmt.Println() - } - - if len(optional) > 0 { - fmt.Printf("%s\n", successStyle.Render(i18n.T("cmd.vm.templates.vars.optional"))) - for v, def := range optional { - fmt.Printf(" %s = %s\n", - varStyle.Render("${"+v+"}"), - defaultStyle.Render(def)) - } - fmt.Println() - } - - if len(required) == 0 && len(optional) == 0 { - fmt.Println(i18n.T("cmd.vm.templates.vars.none")) - } - - return nil -} - -// RunFromTemplate builds and runs a LinuxKit image from a template. 
-func RunFromTemplate(templateName string, vars map[string]string, runOpts container.RunOptions) error { - // Apply template with variables - content, err := container.ApplyTemplate(templateName, vars) - if err != nil { - return fmt.Errorf(i18n.T("common.error.failed", map[string]any{"Action": "apply template"})+": %w", err) - } - - // Create a temporary directory for the build - tmpDir, err := os.MkdirTemp("", "core-linuxkit-*") - if err != nil { - return fmt.Errorf(i18n.T("common.error.failed", map[string]any{"Action": "create temp directory"})+": %w", err) - } - defer os.RemoveAll(tmpDir) - - // Write the YAML file - yamlPath := filepath.Join(tmpDir, templateName+".yml") - if err := os.WriteFile(yamlPath, []byte(content), 0644); err != nil { - return fmt.Errorf(i18n.T("common.error.failed", map[string]any{"Action": "write template"})+": %w", err) - } - - fmt.Printf("%s %s\n", dimStyle.Render(i18n.T("common.label.template")), repoNameStyle.Render(templateName)) - fmt.Printf("%s %s\n", dimStyle.Render(i18n.T("cmd.vm.label.building")), yamlPath) - - // Build the image using linuxkit - outputPath := filepath.Join(tmpDir, templateName) - if err := buildLinuxKitImage(yamlPath, outputPath); err != nil { - return fmt.Errorf(i18n.T("common.error.failed", map[string]any{"Action": "build image"})+": %w", err) - } - - // Find the built image (linuxkit creates .iso or other format) - imagePath := findBuiltImage(outputPath) - if imagePath == "" { - return errors.New(i18n.T("cmd.vm.error.no_image_found")) - } - - fmt.Printf("%s %s\n", dimStyle.Render(i18n.T("common.label.image")), imagePath) - fmt.Println() - - // Run the image - manager, err := container.NewLinuxKitManager() - if err != nil { - return fmt.Errorf(i18n.T("common.error.failed", map[string]any{"Action": "initialize container manager"})+": %w", err) - } - - fmt.Printf("%s %s\n", dimStyle.Render(i18n.T("cmd.vm.label.hypervisor")), manager.Hypervisor().Name()) - fmt.Println() - - ctx := context.Background() - c, err 
:= manager.Run(ctx, imagePath, runOpts) - if err != nil { - return fmt.Errorf(i18n.T("common.error.failed", map[string]any{"Action": "run container"})+": %w", err) - } - - if runOpts.Detach { - fmt.Printf("%s %s\n", successStyle.Render(i18n.T("common.label.started")), c.ID) - fmt.Printf("%s %d\n", dimStyle.Render(i18n.T("cmd.vm.label.pid")), c.PID) - fmt.Println() - fmt.Println(i18n.T("cmd.vm.hint.view_logs", map[string]interface{}{"ID": c.ID[:8]})) - fmt.Println(i18n.T("cmd.vm.hint.stop", map[string]interface{}{"ID": c.ID[:8]})) - } else { - fmt.Printf("\n%s %s\n", dimStyle.Render(i18n.T("cmd.vm.label.container_stopped")), c.ID) - } - - return nil -} - -// buildLinuxKitImage builds a LinuxKit image from a YAML file. -func buildLinuxKitImage(yamlPath, outputPath string) error { - // Check if linuxkit is available - lkPath, err := lookupLinuxKit() - if err != nil { - return err - } - - // Build the image - // linuxkit build --format iso-bios --name - cmd := exec.Command(lkPath, "build", - "--format", "iso-bios", - "--name", outputPath, - yamlPath) - - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - return cmd.Run() -} - -// findBuiltImage finds the built image file. -func findBuiltImage(basePath string) string { - // LinuxKit can create different formats - extensions := []string{".iso", "-bios.iso", ".qcow2", ".raw", ".vmdk"} - - for _, ext := range extensions { - path := basePath + ext - if _, err := os.Stat(path); err == nil { - return path - } - } - - // Check directory for any image file - dir := filepath.Dir(basePath) - base := filepath.Base(basePath) - - entries, err := os.ReadDir(dir) - if err != nil { - return "" - } - - for _, entry := range entries { - name := entry.Name() - if strings.HasPrefix(name, base) { - for _, ext := range []string{".iso", ".qcow2", ".raw", ".vmdk"} { - if strings.HasSuffix(name, ext) { - return filepath.Join(dir, name) - } - } - } - } - - return "" -} - -// lookupLinuxKit finds the linuxkit binary. 
-func lookupLinuxKit() (string, error) { - // Check PATH first - if path, err := exec.LookPath("linuxkit"); err == nil { - return path, nil - } - - // Check common locations - paths := []string{ - "/usr/local/bin/linuxkit", - "/opt/homebrew/bin/linuxkit", - } - - for _, p := range paths { - if _, err := os.Stat(p); err == nil { - return p, nil - } - } - - return "", errors.New(i18n.T("cmd.vm.error.linuxkit_not_found")) -} - -// ParseVarFlags parses --var flags into a map. -// Format: --var KEY=VALUE or --var KEY="VALUE" -func ParseVarFlags(varFlags []string) map[string]string { - vars := make(map[string]string) - - for _, v := range varFlags { - parts := strings.SplitN(v, "=", 2) - if len(parts) == 2 { - key := strings.TrimSpace(parts[0]) - value := strings.TrimSpace(parts[1]) - // Remove surrounding quotes if present - value = strings.Trim(value, "\"'") - vars[key] = value - } - } - - return vars -} diff --git a/pkg/vm/cmd_vm.go b/pkg/vm/cmd_vm.go deleted file mode 100644 index 5be2c77..0000000 --- a/pkg/vm/cmd_vm.go +++ /dev/null @@ -1,43 +0,0 @@ -// Package vm provides LinuxKit VM management commands. -package vm - -import ( - "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/i18n" - "github.com/spf13/cobra" -) - -func init() { - cli.RegisterCommands(AddVMCommands) -} - -// Style aliases from shared -var ( - repoNameStyle = cli.RepoStyle - successStyle = cli.SuccessStyle - errorStyle = cli.ErrorStyle - dimStyle = cli.DimStyle -) - -// VM-specific styles -var ( - varStyle = cli.NewStyle().Foreground(cli.ColourAmber500) - defaultStyle = cli.NewStyle().Foreground(cli.ColourGray500).Italic() -) - -// AddVMCommands adds container-related commands under 'vm' to the CLI. 
-func AddVMCommands(root *cobra.Command) { - vmCmd := &cobra.Command{ - Use: "vm", - Short: i18n.T("cmd.vm.short"), - Long: i18n.T("cmd.vm.long"), - } - - root.AddCommand(vmCmd) - addVMRunCommand(vmCmd) - addVMPsCommand(vmCmd) - addVMStopCommand(vmCmd) - addVMLogsCommand(vmCmd) - addVMExecCommand(vmCmd) - addVMTemplatesCommand(vmCmd) -} \ No newline at end of file diff --git a/pkg/workspace/cmd.go b/pkg/workspace/cmd.go deleted file mode 100644 index 8c45ff0..0000000 --- a/pkg/workspace/cmd.go +++ /dev/null @@ -1,7 +0,0 @@ -package workspace - -import "github.com/host-uk/core/pkg/cli" - -func init() { - cli.RegisterCommands(AddWorkspaceCommands) -} diff --git a/pkg/workspace/cmd_workspace.go b/pkg/workspace/cmd_workspace.go deleted file mode 100644 index a25b116..0000000 --- a/pkg/workspace/cmd_workspace.go +++ /dev/null @@ -1,87 +0,0 @@ -package workspace - -import ( - "strings" - - "github.com/host-uk/core/pkg/cli" - "github.com/spf13/cobra" -) - -func AddWorkspaceCommands(root *cobra.Command) { - wsCmd := &cobra.Command{ - Use: "workspace", - Short: "Manage workspace configuration", - RunE: runWorkspaceInfo, - } - - wsCmd.AddCommand(&cobra.Command{ - Use: "active [package]", - Short: "Show or set the active package", - RunE: runWorkspaceActive, - }) - - root.AddCommand(wsCmd) -} - -func runWorkspaceInfo(cmd *cobra.Command, args []string) error { - root, err := FindWorkspaceRoot() - if err != nil { - return cli.Err("not in a workspace") - } - - config, err := LoadConfig(root) - if err != nil { - return err - } - if config == nil { - return cli.Err("workspace config not found") - } - - cli.Print("Active: %s\n", cli.ValueStyle.Render(config.Active)) - cli.Print("Packages: %s\n", cli.DimStyle.Render(config.PackagesDir)) - if len(config.DefaultOnly) > 0 { - cli.Print("Types: %s\n", cli.DimStyle.Render(strings.Join(config.DefaultOnly, ", "))) - } - - return nil -} - -func runWorkspaceActive(cmd *cobra.Command, args []string) error { - root, err := FindWorkspaceRoot() 
- if err != nil { - return cli.Err("not in a workspace") - } - - config, err := LoadConfig(root) - if err != nil { - return err - } - if config == nil { - config = DefaultConfig() - } - - // If no args, show active - if len(args) == 0 { - if config.Active == "" { - cli.Println("No active package set") - return nil - } - cli.Text(config.Active) - return nil - } - - // Set active - target := args[0] - if target == config.Active { - cli.Print("Active package is already %s\n", cli.ValueStyle.Render(target)) - return nil - } - - config.Active = target - if err := SaveConfig(root, config); err != nil { - return err - } - - cli.Print("Active package set to %s\n", cli.SuccessStyle.Render(target)) - return nil -} diff --git a/pkg/workspace/config.go b/pkg/workspace/config.go deleted file mode 100644 index fc781b5..0000000 --- a/pkg/workspace/config.go +++ /dev/null @@ -1,97 +0,0 @@ -package workspace - -import ( - "fmt" - "os" - "path/filepath" - - "gopkg.in/yaml.v3" -) - -// WorkspaceConfig holds workspace-level configuration from .core/workspace.yaml. -type WorkspaceConfig struct { - Version int `yaml:"version"` - Active string `yaml:"active"` // Active package name - DefaultOnly []string `yaml:"default_only"` // Default types for setup - PackagesDir string `yaml:"packages_dir"` // Where packages are cloned -} - -// DefaultConfig returns a config with default values. -func DefaultConfig() *WorkspaceConfig { - return &WorkspaceConfig{ - Version: 1, - PackagesDir: "./packages", - } -} - -// LoadConfig tries to load workspace.yaml from the given directory's .core subfolder. -// Returns nil if no config file exists (caller should check for nil). 
-func LoadConfig(dir string) (*WorkspaceConfig, error) { - path := filepath.Join(dir, ".core", "workspace.yaml") - data, err := os.ReadFile(path) - if err != nil { - if os.IsNotExist(err) { - // Try parent directory - parent := filepath.Dir(dir) - if parent != dir { - return LoadConfig(parent) - } - // No workspace.yaml found anywhere - return nil to indicate no config - return nil, nil - } - return nil, fmt.Errorf("failed to read workspace config: %w", err) - } - - config := DefaultConfig() - if err := yaml.Unmarshal(data, config); err != nil { - return nil, fmt.Errorf("failed to parse workspace config: %w", err) - } - - if config.Version != 1 { - return nil, fmt.Errorf("unsupported workspace config version: %d", config.Version) - } - - return config, nil -} - -// SaveConfig saves the configuration to the given directory's .core/workspace.yaml. -func SaveConfig(dir string, config *WorkspaceConfig) error { - coreDir := filepath.Join(dir, ".core") - if err := os.MkdirAll(coreDir, 0755); err != nil { - return fmt.Errorf("failed to create .core directory: %w", err) - } - - path := filepath.Join(coreDir, "workspace.yaml") - data, err := yaml.Marshal(config) - if err != nil { - return fmt.Errorf("failed to marshal workspace config: %w", err) - } - - if err := os.WriteFile(path, data, 0644); err != nil { - return fmt.Errorf("failed to write workspace config: %w", err) - } - - return nil -} - -// FindWorkspaceRoot searches for the root directory containing .core/workspace.yaml. 
-func FindWorkspaceRoot() (string, error) { - dir, err := os.Getwd() - if err != nil { - return "", err - } - - for { - if _, err := os.Stat(filepath.Join(dir, ".core", "workspace.yaml")); err == nil { - return dir, nil - } - - parent := filepath.Dir(dir) - if parent == dir { - break - } - dir = parent - } - - return "", fmt.Errorf("not in a workspace") -} diff --git a/runtime.go b/runtime.go new file mode 100644 index 0000000..3e48afb --- /dev/null +++ b/runtime.go @@ -0,0 +1,153 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// Runtime helpers for the Core framework. +// ServiceRuntime is embedded by consumer services. +// Runtime is the GUI binding container (e.g., Wails). + +package core + +import ( + "context" + "maps" + "slices" +) + +// --- ServiceRuntime (embedded by consumer services) --- + +// ServiceRuntime is embedded in services to provide access to the Core and typed options. +type ServiceRuntime[T any] struct { + core *Core + opts T +} + +// NewServiceRuntime creates a ServiceRuntime for a service constructor. +func NewServiceRuntime[T any](c *Core, opts T) *ServiceRuntime[T] { + return &ServiceRuntime[T]{core: c, opts: opts} +} + +func (r *ServiceRuntime[T]) Core() *Core { return r.core } +func (r *ServiceRuntime[T]) Options() T { return r.opts } +func (r *ServiceRuntime[T]) Config() *Config { return r.core.Config() } + +// --- Lifecycle --- + +// ServiceStartup runs OnStart for all registered services that have one. +func (c *Core) ServiceStartup(ctx context.Context, options any) Result { + c.shutdown.Store(false) + c.context, c.cancel = context.WithCancel(ctx) + startables := c.Startables() + if startables.OK { + for _, s := range startables.Value.([]*Service) { + if err := ctx.Err(); err != nil { + return Result{err, false} + } + r := s.OnStart() + if !r.OK { + return r + } + } + } + c.ACTION(ActionServiceStartup{}) + return Result{OK: true} +} + +// ServiceShutdown drains background tasks, then stops all registered services. 
+func (c *Core) ServiceShutdown(ctx context.Context) Result { + c.shutdown.Store(true) + c.cancel() // signal all context-aware tasks to stop + c.ACTION(ActionServiceShutdown{}) + + // Drain background tasks before stopping services. + done := make(chan struct{}) + go func() { + c.waitGroup.Wait() + close(done) + }() + select { + case <-done: + case <-ctx.Done(): + return Result{ctx.Err(), false} + } + + // Stop services + var firstErr error + stoppables := c.Stoppables() + if stoppables.OK { + for _, s := range stoppables.Value.([]*Service) { + if err := ctx.Err(); err != nil { + return Result{err, false} + } + r := s.OnStop() + if !r.OK && firstErr == nil { + if e, ok := r.Value.(error); ok { + firstErr = e + } else { + firstErr = E("core.ServiceShutdown", Sprint("service OnStop failed: ", r.Value), nil) + } + } + } + } + if firstErr != nil { + return Result{firstErr, false} + } + return Result{OK: true} +} + +// --- Runtime DTO (GUI binding) --- + +// Runtime is the container for GUI runtimes (e.g., Wails). +type Runtime struct { + app any + Core *Core +} + +// ServiceFactory defines a function that creates a Service. +type ServiceFactory func() Result + +// NewWithFactories creates a Runtime with the provided service factories. 
+func NewWithFactories(app any, factories map[string]ServiceFactory) Result { + r := New(WithOptions(Options{{Key: "name", Value: "core"}})) + if !r.OK { + return r + } + c := r.Value.(*Core) + c.app.Runtime = app + + names := slices.Sorted(maps.Keys(factories)) + for _, name := range names { + factory := factories[name] + if factory == nil { + continue + } + r := factory() + if !r.OK { + cause, _ := r.Value.(error) + return Result{E("core.NewWithFactories", Concat("factory \"", name, "\" failed"), cause), false} + } + svc, ok := r.Value.(Service) + if !ok { + return Result{E("core.NewWithFactories", Concat("factory \"", name, "\" returned non-Service type"), nil), false} + } + sr := c.Service(name, svc) + if !sr.OK { + return sr + } + } + return Result{&Runtime{app: app, Core: c}, true} +} + +// NewRuntime creates a Runtime with no custom services. +func NewRuntime(app any) Result { + return NewWithFactories(app, map[string]ServiceFactory{}) +} + +func (r *Runtime) ServiceName() string { return "Core" } +func (r *Runtime) ServiceStartup(ctx context.Context, options any) Result { + return r.Core.ServiceStartup(ctx, options) +} +func (r *Runtime) ServiceShutdown(ctx context.Context) Result { + if r.Core != nil { + return r.Core.ServiceShutdown(ctx) + } + return Result{OK: true} +} diff --git a/runtime_test.go b/runtime_test.go new file mode 100644 index 0000000..3da01e5 --- /dev/null +++ b/runtime_test.go @@ -0,0 +1,121 @@ +package core_test + +import ( + "context" + "testing" + + . 
"dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- ServiceRuntime --- + +type testOpts struct { + URL string + Timeout int +} + +func TestServiceRuntime_Good(t *testing.T) { + c := New().Value.(*Core) + opts := testOpts{URL: "https://api.lthn.ai", Timeout: 30} + rt := NewServiceRuntime(c, opts) + + assert.Equal(t, c, rt.Core()) + assert.Equal(t, opts, rt.Options()) + assert.Equal(t, "https://api.lthn.ai", rt.Options().URL) + assert.NotNil(t, rt.Config()) +} + +// --- NewWithFactories --- + +func TestNewWithFactories_Good(t *testing.T) { + r := NewWithFactories(nil, map[string]ServiceFactory{ + "svc1": func() Result { return Result{Value: Service{}, OK: true} }, + "svc2": func() Result { return Result{Value: Service{}, OK: true} }, + }) + assert.True(t, r.OK) + rt := r.Value.(*Runtime) + assert.NotNil(t, rt.Core) +} + +func TestNewWithFactories_NilFactory_Good(t *testing.T) { + r := NewWithFactories(nil, map[string]ServiceFactory{ + "bad": nil, + }) + assert.True(t, r.OK) // nil factories skipped +} + +func TestNewRuntime_Good(t *testing.T) { + r := NewRuntime(nil) + assert.True(t, r.OK) +} + +func TestRuntime_ServiceName_Good(t *testing.T) { + r := NewRuntime(nil) + rt := r.Value.(*Runtime) + assert.Equal(t, "Core", rt.ServiceName()) +} + +// --- Lifecycle via Runtime --- + +func TestRuntime_Lifecycle_Good(t *testing.T) { + started := false + r := NewWithFactories(nil, map[string]ServiceFactory{ + "test": func() Result { + return Result{Value: Service{ + OnStart: func() Result { started = true; return Result{OK: true} }, + }, OK: true} + }, + }) + assert.True(t, r.OK) + rt := r.Value.(*Runtime) + + result := rt.ServiceStartup(context.Background(), nil) + assert.True(t, result.OK) + assert.True(t, started) +} + +func TestRuntime_ServiceShutdown_Good(t *testing.T) { + stopped := false + r := NewWithFactories(nil, map[string]ServiceFactory{ + "test": func() Result { + return Result{Value: Service{ + OnStart: func() Result { return Result{OK: true} 
}, + OnStop: func() Result { stopped = true; return Result{OK: true} }, + }, OK: true} + }, + }) + assert.True(t, r.OK) + rt := r.Value.(*Runtime) + + rt.ServiceStartup(context.Background(), nil) + result := rt.ServiceShutdown(context.Background()) + assert.True(t, result.OK) + assert.True(t, stopped) +} + +func TestRuntime_ServiceShutdown_NilCore_Good(t *testing.T) { + rt := &Runtime{} + result := rt.ServiceShutdown(context.Background()) + assert.True(t, result.OK) +} + +func TestCore_ServiceShutdown_Good(t *testing.T) { + stopped := false + c := New().Value.(*Core) + c.Service("test", Service{ + OnStart: func() Result { return Result{OK: true} }, + OnStop: func() Result { stopped = true; return Result{OK: true} }, + }) + c.ServiceStartup(context.Background(), nil) + result := c.ServiceShutdown(context.Background()) + assert.True(t, result.OK) + assert.True(t, stopped) +} + +func TestCore_Context_Good(t *testing.T) { + c := New().Value.(*Core) + c.ServiceStartup(context.Background(), nil) + assert.NotNil(t, c.Context()) + c.ServiceShutdown(context.Background()) +} diff --git a/service.go b/service.go new file mode 100644 index 0000000..1e82dd6 --- /dev/null +++ b/service.go @@ -0,0 +1,83 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// Service registry for the Core framework. +// +// Register a service: +// +// c.Service("auth", core.Service{}) +// +// Get a service: +// +// r := c.Service("auth") +// if r.OK { svc := r.Value } + +package core + +// No imports needed — uses package-level string helpers. + +// Service is a managed component with optional lifecycle. +type Service struct { + Name string + Options Options + OnStart func() Result + OnStop func() Result + OnReload func() Result +} + +// serviceRegistry holds registered services. +type serviceRegistry struct { + services map[string]*Service + lockEnabled bool + locked bool +} + +// --- Core service methods --- + +// Service gets or registers a service by name. 
+// +// c.Service("auth", core.Service{OnStart: startFn}) +// r := c.Service("auth") +func (c *Core) Service(name string, service ...Service) Result { + if len(service) == 0 { + c.Lock("srv").Mutex.RLock() + v, ok := c.services.services[name] + c.Lock("srv").Mutex.RUnlock() + return Result{v, ok} + } + + if name == "" { + return Result{E("core.Service", "service name cannot be empty", nil), false} + } + + c.Lock("srv").Mutex.Lock() + defer c.Lock("srv").Mutex.Unlock() + + if c.services.locked { + return Result{E("core.Service", Concat("service \"", name, "\" not permitted — registry locked"), nil), false} + } + if _, exists := c.services.services[name]; exists { + return Result{E("core.Service", Join(" ", "service", name, "already registered"), nil), false} + } + + srv := &service[0] + srv.Name = name + c.services.services[name] = srv + + return Result{OK: true} +} + +// Services returns all registered service names. +// +// names := c.Services() +func (c *Core) Services() []string { + if c.services == nil { + return nil + } + c.Lock("srv").Mutex.RLock() + defer c.Lock("srv").Mutex.RUnlock() + var names []string + for k := range c.services.services { + names = append(names, k) + } + return names +} diff --git a/service_test.go b/service_test.go new file mode 100644 index 0000000..ddd32fd --- /dev/null +++ b/service_test.go @@ -0,0 +1,79 @@ +package core_test + +import ( + "testing" + + . 
"dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- Service Registration --- + +func TestService_Register_Good(t *testing.T) { + c := New().Value.(*Core) + r := c.Service("auth", Service{}) + assert.True(t, r.OK) +} + +func TestService_Register_Duplicate_Bad(t *testing.T) { + c := New().Value.(*Core) + c.Service("auth", Service{}) + r := c.Service("auth", Service{}) + assert.False(t, r.OK) +} + +func TestService_Register_Empty_Bad(t *testing.T) { + c := New().Value.(*Core) + r := c.Service("", Service{}) + assert.False(t, r.OK) +} + +func TestService_Get_Good(t *testing.T) { + c := New().Value.(*Core) + c.Service("brain", Service{OnStart: func() Result { return Result{OK: true} }}) + r := c.Service("brain") + assert.True(t, r.OK) + assert.NotNil(t, r.Value) +} + +func TestService_Get_Bad(t *testing.T) { + c := New().Value.(*Core) + r := c.Service("nonexistent") + assert.False(t, r.OK) +} + +func TestService_Names_Good(t *testing.T) { + c := New().Value.(*Core) + c.Service("a", Service{}) + c.Service("b", Service{}) + names := c.Services() + assert.Len(t, names, 2) + assert.Contains(t, names, "a") + assert.Contains(t, names, "b") +} + +// --- Service Lifecycle --- + +func TestService_Lifecycle_Good(t *testing.T) { + c := New().Value.(*Core) + started := false + stopped := false + c.Service("lifecycle", Service{ + OnStart: func() Result { started = true; return Result{OK: true} }, + OnStop: func() Result { stopped = true; return Result{OK: true} }, + }) + + sr := c.Startables() + assert.True(t, sr.OK) + startables := sr.Value.([]*Service) + assert.Len(t, startables, 1) + startables[0].OnStart() + assert.True(t, started) + + tr := c.Stoppables() + assert.True(t, tr.OK) + stoppables := tr.Value.([]*Service) + assert.Len(t, stoppables, 1) + stoppables[0].OnStop() + assert.True(t, stopped) +} diff --git a/string.go b/string.go new file mode 100644 index 0000000..4c64aa7 --- /dev/null +++ b/string.go @@ -0,0 +1,157 @@ +// SPDX-License-Identifier: 
EUPL-1.2 + +// String operations for the Core framework. +// Provides safe, predictable string helpers that downstream packages +// use directly — same pattern as Array[T] for slices. + +package core + +import ( + "fmt" + "strings" + "unicode/utf8" +) + +// HasPrefix returns true if s starts with prefix. +// +// core.HasPrefix("--verbose", "--") // true +func HasPrefix(s, prefix string) bool { + return strings.HasPrefix(s, prefix) +} + +// HasSuffix returns true if s ends with suffix. +// +// core.HasSuffix("test.go", ".go") // true +func HasSuffix(s, suffix string) bool { + return strings.HasSuffix(s, suffix) +} + +// TrimPrefix removes prefix from s. +// +// core.TrimPrefix("--verbose", "--") // "verbose" +func TrimPrefix(s, prefix string) string { + return strings.TrimPrefix(s, prefix) +} + +// TrimSuffix removes suffix from s. +// +// core.TrimSuffix("test.go", ".go") // "test" +func TrimSuffix(s, suffix string) string { + return strings.TrimSuffix(s, suffix) +} + +// Contains returns true if s contains substr. +// +// core.Contains("hello world", "world") // true +func Contains(s, substr string) bool { + return strings.Contains(s, substr) +} + +// Split splits s by separator. +// +// core.Split("a/b/c", "/") // ["a", "b", "c"] +func Split(s, sep string) []string { + return strings.Split(s, sep) +} + +// SplitN splits s by separator into at most n parts. +// +// core.SplitN("key=value=extra", "=", 2) // ["key", "value=extra"] +func SplitN(s, sep string, n int) []string { + return strings.SplitN(s, sep, n) +} + +// Join joins parts with a separator, building via Concat. 
+// +// core.Join("/", "deploy", "to", "homelab") // "deploy/to/homelab" +// core.Join(".", "cmd", "deploy", "description") // "cmd.deploy.description" +func Join(sep string, parts ...string) string { + if len(parts) == 0 { + return "" + } + result := parts[0] + for _, p := range parts[1:] { + result = Concat(result, sep, p) + } + return result +} + +// Replace replaces all occurrences of old with new in s. +// +// core.Replace("deploy/to/homelab", "/", ".") // "deploy.to.homelab" +func Replace(s, old, new string) string { + return strings.ReplaceAll(s, old, new) +} + +// Lower returns s in lowercase. +// +// core.Lower("HELLO") // "hello" +func Lower(s string) string { + return strings.ToLower(s) +} + +// Upper returns s in uppercase. +// +// core.Upper("hello") // "HELLO" +func Upper(s string) string { + return strings.ToUpper(s) +} + +// Trim removes leading and trailing whitespace. +// +// core.Trim(" hello ") // "hello" +func Trim(s string) string { + return strings.TrimSpace(s) +} + +// RuneCount returns the number of runes (unicode characters) in s. +// +// core.RuneCount("hello") // 5 +// core.RuneCount("🔥") // 1 +func RuneCount(s string) int { + return utf8.RuneCountInString(s) +} + +// NewBuilder returns a new strings.Builder. +// +// b := core.NewBuilder() +// b.WriteString("hello") +// b.String() // "hello" +func NewBuilder() *strings.Builder { + return &strings.Builder{} +} + +// NewReader returns a strings.NewReader for the given string. +// +// r := core.NewReader("hello world") +func NewReader(s string) *strings.Reader { + return strings.NewReader(s) +} + +// Sprint converts any value to its string representation. +// +// core.Sprint(42) // "42" +// core.Sprint(err) // "connection refused" +func Sprint(args ...any) string { + return fmt.Sprint(args...) +} + +// Sprintf formats a string with the given arguments. 
+// +// core.Sprintf("%v=%q", "key", "value") // `key="value"` +func Sprintf(format string, args ...any) string { + return fmt.Sprintf(format, args...) +} + +// Concat joins variadic string parts into one string. +// Hook point for validation, sanitisation, and security checks. +// +// core.Concat("cmd.", "deploy.to.homelab", ".description") +// core.Concat("https://", host, "/api/v1") +func Concat(parts ...string) string { + b := NewBuilder() + for _, p := range parts { + b.WriteString(p) + } + return b.String() +} diff --git a/string_test.go b/string_test.go new file mode 100644 index 0000000..5c821ea --- /dev/null +++ b/string_test.go @@ -0,0 +1,70 @@ +package core_test + +import ( + "testing" + + . "dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- String Operations --- + +func TestHasPrefix_Good(t *testing.T) { + assert.True(t, HasPrefix("--verbose", "--")) + assert.True(t, HasPrefix("-v", "-")) + assert.False(t, HasPrefix("hello", "-")) +} + +func TestHasSuffix_Good(t *testing.T) { + assert.True(t, HasSuffix("test.go", ".go")) + assert.False(t, HasSuffix("test.go", ".py")) +} + +func TestTrimPrefix_Good(t *testing.T) { + assert.Equal(t, "verbose", TrimPrefix("--verbose", "--")) + assert.Equal(t, "hello", TrimPrefix("hello", "--")) +} + +func TestTrimSuffix_Good(t *testing.T) { + assert.Equal(t, "test", TrimSuffix("test.go", ".go")) + assert.Equal(t, "test.go", TrimSuffix("test.go", ".py")) +} + +func TestContains_Good(t *testing.T) { + assert.True(t, Contains("hello world", "world")) + assert.False(t, Contains("hello world", "mars")) +} + +func TestSplit_Good(t *testing.T) { + assert.Equal(t, []string{"a", "b", "c"}, Split("a/b/c", "/")) +} + +func TestSplitN_Good(t *testing.T) { + assert.Equal(t, []string{"key", "value=extra"}, SplitN("key=value=extra", "=", 2)) +} + +func TestJoin_Good(t *testing.T) { + assert.Equal(t, "a/b/c", Join("/", "a", "b", "c")) +} + +func TestReplace_Good(t *testing.T) { + assert.Equal(t, "deploy.to.homelab", 
Replace("deploy/to/homelab", "/", ".")) +} + +func TestLower_Good(t *testing.T) { + assert.Equal(t, "hello", Lower("HELLO")) +} + +func TestUpper_Good(t *testing.T) { + assert.Equal(t, "HELLO", Upper("hello")) +} + +func TestTrim_Good(t *testing.T) { + assert.Equal(t, "hello", Trim(" hello ")) +} + +func TestRuneCount_Good(t *testing.T) { + assert.Equal(t, 5, RuneCount("hello")) + assert.Equal(t, 1, RuneCount("🔥")) + assert.Equal(t, 0, RuneCount("")) +} diff --git a/task.go b/task.go new file mode 100644 index 0000000..acdf394 --- /dev/null +++ b/task.go @@ -0,0 +1,92 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// Background task dispatch for the Core framework. + +package core + +import ( + "reflect" + "slices" + "strconv" +) + +// TaskState holds background task state. +type TaskState struct { + Identifier string + Task Task + Result any + Error error +} + +// PerformAsync dispatches a task in a background goroutine. +func (c *Core) PerformAsync(t Task) Result { + if c.shutdown.Load() { + return Result{} + } + taskID := Concat("task-", strconv.FormatUint(c.taskIDCounter.Add(1), 10)) + if tid, ok := t.(TaskWithIdentifier); ok { + tid.SetTaskIdentifier(taskID) + } + c.ACTION(ActionTaskStarted{TaskIdentifier: taskID, Task: t}) + c.waitGroup.Go(func() { + defer func() { + if rec := recover(); rec != nil { + err := E("core.PerformAsync", Sprint("panic: ", rec), nil) + c.ACTION(ActionTaskCompleted{TaskIdentifier: taskID, Task: t, Result: nil, Error: err}) + } + }() + r := c.PERFORM(t) + var err error + if !r.OK { + if e, ok := r.Value.(error); ok { + err = e + } else { + taskType := reflect.TypeOf(t) + typeName := "" + if taskType != nil { + typeName = taskType.String() + } + err = E("core.PerformAsync", Join(" ", "no handler found for task type", typeName), nil) + } + } + c.ACTION(ActionTaskCompleted{TaskIdentifier: taskID, Task: t, Result: r.Value, Error: err}) + }) + return Result{taskID, true} +} + +// Progress broadcasts a progress update for a background task. 
+func (c *Core) Progress(taskID string, progress float64, message string, t Task) { + c.ACTION(ActionTaskProgress{TaskIdentifier: taskID, Task: t, Progress: progress, Message: message}) +} + +func (c *Core) Perform(t Task) Result { + c.ipc.taskMu.RLock() + handlers := slices.Clone(c.ipc.taskHandlers) + c.ipc.taskMu.RUnlock() + + for _, h := range handlers { + r := h(c, t) + if r.OK { + return r + } + } + return Result{} +} + +func (c *Core) RegisterAction(handler func(*Core, Message) Result) { + c.ipc.ipcMu.Lock() + c.ipc.ipcHandlers = append(c.ipc.ipcHandlers, handler) + c.ipc.ipcMu.Unlock() +} + +func (c *Core) RegisterActions(handlers ...func(*Core, Message) Result) { + c.ipc.ipcMu.Lock() + c.ipc.ipcHandlers = append(c.ipc.ipcHandlers, handlers...) + c.ipc.ipcMu.Unlock() +} + +func (c *Core) RegisterTask(handler TaskHandler) { + c.ipc.taskMu.Lock() + c.ipc.taskHandlers = append(c.ipc.taskHandlers, handler) + c.ipc.taskMu.Unlock() +} diff --git a/task_test.go b/task_test.go new file mode 100644 index 0000000..5e70efd --- /dev/null +++ b/task_test.go @@ -0,0 +1,125 @@ +package core_test + +import ( + "context" + "sync" + "testing" + "time" + + . 
"dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- PerformAsync --- + +func TestPerformAsync_Good(t *testing.T) { + c := New().Value.(*Core) + var mu sync.Mutex + var result string + + c.RegisterTask(func(_ *Core, task Task) Result { + mu.Lock() + result = "done" + mu.Unlock() + return Result{"completed", true} + }) + + r := c.PerformAsync("work") + assert.True(t, r.OK) + taskID := r.Value.(string) + assert.NotEmpty(t, taskID) + + time.Sleep(100 * time.Millisecond) + + mu.Lock() + assert.Equal(t, "done", result) + mu.Unlock() +} + +func TestPerformAsync_Progress_Good(t *testing.T) { + c := New().Value.(*Core) + c.RegisterTask(func(_ *Core, task Task) Result { + return Result{OK: true} + }) + + r := c.PerformAsync("work") + taskID := r.Value.(string) + c.Progress(taskID, 0.5, "halfway", "work") +} + +func TestPerformAsync_Completion_Good(t *testing.T) { + c := New().Value.(*Core) + completed := make(chan ActionTaskCompleted, 1) + + c.RegisterTask(func(_ *Core, task Task) Result { + return Result{Value: "result", OK: true} + }) + c.RegisterAction(func(_ *Core, msg Message) Result { + if evt, ok := msg.(ActionTaskCompleted); ok { + completed <- evt + } + return Result{OK: true} + }) + + c.PerformAsync("work") + + select { + case evt := <-completed: + assert.Nil(t, evt.Error) + assert.Equal(t, "result", evt.Result) + case <-time.After(2 * time.Second): + t.Fatal("timed out waiting for completion") + } +} + +func TestPerformAsync_NoHandler_Good(t *testing.T) { + c := New().Value.(*Core) + completed := make(chan ActionTaskCompleted, 1) + + c.RegisterAction(func(_ *Core, msg Message) Result { + if evt, ok := msg.(ActionTaskCompleted); ok { + completed <- evt + } + return Result{OK: true} + }) + + c.PerformAsync("unhandled") + + select { + case evt := <-completed: + assert.NotNil(t, evt.Error) + case <-time.After(2 * time.Second): + t.Fatal("timed out") + } +} + +func TestPerformAsync_AfterShutdown_Bad(t *testing.T) { + c := New().Value.(*Core) + 
c.ServiceStartup(context.Background(), nil) + c.ServiceShutdown(context.Background()) + + r := c.PerformAsync("should not run") + assert.False(t, r.OK) +} + +// --- RegisterAction + RegisterActions --- + +func TestRegisterAction_Good(t *testing.T) { + c := New().Value.(*Core) + called := false + c.RegisterAction(func(_ *Core, _ Message) Result { + called = true + return Result{OK: true} + }) + c.Action(nil) + assert.True(t, called) +} + +func TestRegisterActions_Good(t *testing.T) { + c := New().Value.(*Core) + count := 0 + h := func(_ *Core, _ Message) Result { count++; return Result{OK: true} } + c.RegisterActions(h, h) + c.Action(nil) + assert.Equal(t, 2, count) +} diff --git a/tasks/plans/2026-01-29-code-signing-design.md b/tasks/plans/2026-01-29-code-signing-design.md deleted file mode 100644 index cedf738..0000000 --- a/tasks/plans/2026-01-29-code-signing-design.md +++ /dev/null @@ -1,236 +0,0 @@ -# Code Signing Design (S3.3) - -## Summary - -Integrate standard code signing tools into the build pipeline. GPG signs checksums by default. macOS codesign + notarization for Apple binaries. Windows signtool deferred. 
- -## Design Decisions - -- **Sign during build**: Signing happens in `pkg/build/signing/` after compilation, before archiving -- **Config location**: `.core/build.yaml` with environment variable fallbacks for secrets -- **GPG scope**: Signs `checksums.txt` only (standard pattern like Go, Terraform) -- **macOS flow**: Codesign always when identity configured, notarize optional with flag/config -- **Windows**: Placeholder for later implementation - -## Package Structure - -``` -pkg/build/signing/ -├── signer.go # Signer interface + SignConfig -├── gpg.go # GPG checksums signing -├── codesign.go # macOS codesign + notarize -└── signtool.go # Windows placeholder -``` - -## Signer Interface - -```go -// pkg/build/signing/signer.go -type Signer interface { - Name() string - Available() bool - Sign(ctx context.Context, artifact string) error -} - -type SignConfig struct { - Enabled bool `yaml:"enabled"` - GPG GPGConfig `yaml:"gpg,omitempty"` - MacOS MacOSConfig `yaml:"macos,omitempty"` - Windows WindowsConfig `yaml:"windows,omitempty"` -} - -type GPGConfig struct { - Key string `yaml:"key"` // Key ID or fingerprint, supports $ENV -} - -type MacOSConfig struct { - Identity string `yaml:"identity"` // Developer ID Application: ... 
- Notarize bool `yaml:"notarize"` // Submit to Apple - AppleID string `yaml:"apple_id"` // Apple account email - TeamID string `yaml:"team_id"` // Team ID - AppPassword string `yaml:"app_password"` // App-specific password -} - -type WindowsConfig struct { - Certificate string `yaml:"certificate"` // Path to .pfx - Password string `yaml:"password"` // Certificate password -} -``` - -## Config Schema - -In `.core/build.yaml`: - -```yaml -sign: - enabled: true - - gpg: - key: $GPG_KEY_ID - - macos: - identity: "Developer ID Application: Your Name (TEAM_ID)" - notarize: false - apple_id: $APPLE_ID - team_id: $APPLE_TEAM_ID - app_password: $APPLE_APP_PASSWORD - - # windows: (deferred) - # certificate: $WINDOWS_CERT_PATH - # password: $WINDOWS_CERT_PASSWORD -``` - -## Build Pipeline Integration - -``` -Build() in pkg/build/builders/go.go - ↓ -compile binaries - ↓ -Sign macOS binaries (codesign) ← NEW - ↓ -Notarize if enabled (wait) ← NEW - ↓ -Create archives (tar.gz, zip) - ↓ -Generate checksums.txt - ↓ -GPG sign checksums.txt ← NEW - ↓ -Return artifacts -``` - -## GPG Signer - -```go -// pkg/build/signing/gpg.go -type GPGSigner struct { - KeyID string -} - -func (s *GPGSigner) Name() string { return "gpg" } - -func (s *GPGSigner) Available() bool { - _, err := exec.LookPath("gpg") - return err == nil && s.KeyID != "" -} - -func (s *GPGSigner) Sign(ctx context.Context, file string) error { - cmd := exec.CommandContext(ctx, "gpg", - "--detach-sign", - "--armor", - "--local-user", s.KeyID, - "--output", file+".asc", - file, - ) - return cmd.Run() -} -``` - -**Output:** `checksums.txt.asc` (ASCII armored detached signature) - -**User verification:** -```bash -gpg --verify checksums.txt.asc checksums.txt -sha256sum -c checksums.txt -``` - -## macOS Codesign - -```go -// pkg/build/signing/codesign.go -type MacOSSigner struct { - Identity string - Notarize bool - AppleID string - TeamID string - AppPassword string -} - -func (s *MacOSSigner) Name() string { return "codesign" 
} - -func (s *MacOSSigner) Available() bool { - if runtime.GOOS != "darwin" { - return false - } - _, err := exec.LookPath("codesign") - return err == nil && s.Identity != "" -} - -func (s *MacOSSigner) Sign(ctx context.Context, binary string) error { - cmd := exec.CommandContext(ctx, "codesign", - "--sign", s.Identity, - "--timestamp", - "--options", "runtime", - "--force", - binary, - ) - return cmd.Run() -} - -func (s *MacOSSigner) NotarizeAndStaple(ctx context.Context, binary string) error { - // 1. Create ZIP for submission - zipPath := binary + ".zip" - exec.CommandContext(ctx, "zip", "-j", zipPath, binary).Run() - defer os.Remove(zipPath) - - // 2. Submit and wait - cmd := exec.CommandContext(ctx, "xcrun", "notarytool", "submit", - zipPath, - "--apple-id", s.AppleID, - "--team-id", s.TeamID, - "--password", s.AppPassword, - "--wait", - ) - if err := cmd.Run(); err != nil { - return fmt.Errorf("notarization failed: %w", err) - } - - // 3. Staple ticket - return exec.CommandContext(ctx, "xcrun", "stapler", "staple", binary).Run() -} -``` - -## CLI Flags - -```bash -core build # Sign with defaults (GPG + codesign if configured) -core build --no-sign # Skip all signing -core build --notarize # Enable macOS notarization (overrides config) -``` - -## Environment Variables - -| Variable | Purpose | -|----------|---------| -| `GPG_KEY_ID` | GPG key ID or fingerprint | -| `CODESIGN_IDENTITY` | macOS Developer ID (fallback) | -| `APPLE_ID` | Apple account email | -| `APPLE_TEAM_ID` | Apple Developer Team ID | -| `APPLE_APP_PASSWORD` | App-specific password for notarization | - -## Deferred - -- **Windows signtool**: Placeholder implementation returning nil -- **Sigstore/keyless signing**: Future consideration -- **Binary-level GPG signatures**: Only checksums.txt signed - -## Implementation Steps - -1. Create `pkg/build/signing/` package structure -2. Implement Signer interface and SignConfig -3. Implement GPGSigner -4. Implement MacOSSigner with codesign -5. 
Add notarization support to MacOSSigner -6. Add SignConfig to build.Config -7. Integrate signing into build pipeline -8. Add CLI flags (--no-sign, --notarize) -9. Add Windows placeholder -10. Tests with mocked exec - -## Dependencies - -- `gpg` CLI (system) -- `codesign` CLI (macOS Xcode Command Line Tools) -- `xcrun notarytool` (macOS Xcode Command Line Tools) -- `xcrun stapler` (macOS Xcode Command Line Tools) diff --git a/tasks/plans/2026-01-29-code-signing-impl.md b/tasks/plans/2026-01-29-code-signing-impl.md deleted file mode 100644 index 8f6f40c..0000000 --- a/tasks/plans/2026-01-29-code-signing-impl.md +++ /dev/null @@ -1,967 +0,0 @@ -# Code Signing Implementation Plan - -> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. - -**Goal:** Add GPG checksums signing and macOS codesign/notarization to the build pipeline. - -**Architecture:** `pkg/build/signing/` package with Signer interface. GPG signs CHECKSUMS.txt. macOS codesign runs after binary compilation, before archiving. Config in `.core/build.yaml` with env var fallbacks. - -**Tech Stack:** Go, os/exec for gpg/codesign/xcrun CLI tools - ---- - -### Task 1: Create Signing Package Structure - -**Files:** -- Create: `pkg/build/signing/signer.go` - -**Step 1: Create signer.go with interface and config types** - -```go -// Package signing provides code signing for build artifacts. -package signing - -import ( - "context" - "os" - "strings" -) - -// Signer defines the interface for code signing implementations. -type Signer interface { - // Name returns the signer's identifier. - Name() string - // Available checks if this signer can be used. - Available() bool - // Sign signs the artifact at the given path. - Sign(ctx context.Context, path string) error -} - -// SignConfig holds signing configuration from .core/build.yaml. 
-type SignConfig struct { - Enabled bool `yaml:"enabled"` - GPG GPGConfig `yaml:"gpg,omitempty"` - MacOS MacOSConfig `yaml:"macos,omitempty"` - Windows WindowsConfig `yaml:"windows,omitempty"` -} - -// GPGConfig holds GPG signing configuration. -type GPGConfig struct { - Key string `yaml:"key"` // Key ID or fingerprint, supports $ENV -} - -// MacOSConfig holds macOS codesign configuration. -type MacOSConfig struct { - Identity string `yaml:"identity"` // Developer ID Application: ... - Notarize bool `yaml:"notarize"` // Submit to Apple for notarization - AppleID string `yaml:"apple_id"` // Apple account email - TeamID string `yaml:"team_id"` // Team ID - AppPassword string `yaml:"app_password"` // App-specific password -} - -// WindowsConfig holds Windows signtool configuration (placeholder). -type WindowsConfig struct { - Certificate string `yaml:"certificate"` // Path to .pfx - Password string `yaml:"password"` // Certificate password -} - -// DefaultSignConfig returns sensible defaults. -func DefaultSignConfig() SignConfig { - return SignConfig{ - Enabled: true, - GPG: GPGConfig{ - Key: os.Getenv("GPG_KEY_ID"), - }, - MacOS: MacOSConfig{ - Identity: os.Getenv("CODESIGN_IDENTITY"), - AppleID: os.Getenv("APPLE_ID"), - TeamID: os.Getenv("APPLE_TEAM_ID"), - AppPassword: os.Getenv("APPLE_APP_PASSWORD"), - }, - } -} - -// ExpandEnv expands environment variables in config values. -func (c *SignConfig) ExpandEnv() { - c.GPG.Key = expandEnv(c.GPG.Key) - c.MacOS.Identity = expandEnv(c.MacOS.Identity) - c.MacOS.AppleID = expandEnv(c.MacOS.AppleID) - c.MacOS.TeamID = expandEnv(c.MacOS.TeamID) - c.MacOS.AppPassword = expandEnv(c.MacOS.AppPassword) - c.Windows.Certificate = expandEnv(c.Windows.Certificate) - c.Windows.Password = expandEnv(c.Windows.Password) -} - -// expandEnv expands $VAR or ${VAR} in a string. 
-func expandEnv(s string) string { - if strings.HasPrefix(s, "$") { - return os.ExpandEnv(s) - } - return s -} -``` - -**Step 2: Verify it compiles** - -Run: `cd /Users/snider/Code/Core && go build ./pkg/build/signing/...` -Expected: No errors - -**Step 3: Commit** - -```bash -git add pkg/build/signing/signer.go -git commit -m "feat(signing): add Signer interface and config types - -Defines interface for GPG, macOS, and Windows signing. -Config supports env var expansion for secrets. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 2: Implement GPG Signer - -**Files:** -- Create: `pkg/build/signing/gpg.go` -- Create: `pkg/build/signing/gpg_test.go` - -**Step 1: Write the failing test** - -```go -package signing - -import ( - "testing" -) - -func TestGPGSigner_Good_Name(t *testing.T) { - s := NewGPGSigner("ABCD1234") - if s.Name() != "gpg" { - t.Errorf("expected name 'gpg', got %q", s.Name()) - } -} - -func TestGPGSigner_Good_Available(t *testing.T) { - s := NewGPGSigner("ABCD1234") - // Available depends on gpg being installed - _ = s.Available() -} - -func TestGPGSigner_Bad_NoKey(t *testing.T) { - s := NewGPGSigner("") - if s.Available() { - t.Error("expected Available() to be false when key is empty") - } -} -``` - -**Step 2: Run test to verify it fails** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/build/signing/... -run TestGPGSigner -v` -Expected: FAIL (NewGPGSigner not defined) - -**Step 3: Write implementation** - -```go -package signing - -import ( - "context" - "fmt" - "os/exec" -) - -// GPGSigner signs files using GPG. -type GPGSigner struct { - KeyID string -} - -// Compile-time interface check. -var _ Signer = (*GPGSigner)(nil) - -// NewGPGSigner creates a new GPG signer. -func NewGPGSigner(keyID string) *GPGSigner { - return &GPGSigner{KeyID: keyID} -} - -// Name returns "gpg". -func (s *GPGSigner) Name() string { - return "gpg" -} - -// Available checks if gpg is installed and key is configured. 
-func (s *GPGSigner) Available() bool { - if s.KeyID == "" { - return false - } - _, err := exec.LookPath("gpg") - return err == nil -} - -// Sign creates a detached ASCII-armored signature. -// For file.txt, creates file.txt.asc -func (s *GPGSigner) Sign(ctx context.Context, file string) error { - if !s.Available() { - return fmt.Errorf("gpg.Sign: gpg not available or key not configured") - } - - cmd := exec.CommandContext(ctx, "gpg", - "--detach-sign", - "--armor", - "--local-user", s.KeyID, - "--output", file+".asc", - file, - ) - - output, err := cmd.CombinedOutput() - if err != nil { - return fmt.Errorf("gpg.Sign: %w\nOutput: %s", err, string(output)) - } - - return nil -} -``` - -**Step 4: Run tests** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/build/signing/... -run TestGPGSigner -v` -Expected: PASS - -**Step 5: Commit** - -```bash -git add pkg/build/signing/gpg.go pkg/build/signing/gpg_test.go -git commit -m "feat(signing): add GPG signer - -Signs files with detached ASCII-armored signatures (.asc). 
- -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 3: Implement macOS Codesign - -**Files:** -- Create: `pkg/build/signing/codesign.go` -- Create: `pkg/build/signing/codesign_test.go` - -**Step 1: Write the failing test** - -```go -package signing - -import ( - "runtime" - "testing" -) - -func TestMacOSSigner_Good_Name(t *testing.T) { - s := NewMacOSSigner(MacOSConfig{Identity: "Developer ID Application: Test"}) - if s.Name() != "codesign" { - t.Errorf("expected name 'codesign', got %q", s.Name()) - } -} - -func TestMacOSSigner_Good_Available(t *testing.T) { - s := NewMacOSSigner(MacOSConfig{Identity: "Developer ID Application: Test"}) - - // Only available on macOS with identity set - if runtime.GOOS == "darwin" { - // May or may not be available depending on Xcode - _ = s.Available() - } else { - if s.Available() { - t.Error("expected Available() to be false on non-macOS") - } - } -} - -func TestMacOSSigner_Bad_NoIdentity(t *testing.T) { - s := NewMacOSSigner(MacOSConfig{}) - if s.Available() { - t.Error("expected Available() to be false when identity is empty") - } -} -``` - -**Step 2: Run test to verify it fails** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/build/signing/... -run TestMacOSSigner -v` -Expected: FAIL (NewMacOSSigner not defined) - -**Step 3: Write implementation** - -```go -package signing - -import ( - "context" - "fmt" - "os" - "os/exec" - "runtime" -) - -// MacOSSigner signs binaries using macOS codesign. -type MacOSSigner struct { - config MacOSConfig -} - -// Compile-time interface check. -var _ Signer = (*MacOSSigner)(nil) - -// NewMacOSSigner creates a new macOS signer. -func NewMacOSSigner(cfg MacOSConfig) *MacOSSigner { - return &MacOSSigner{config: cfg} -} - -// Name returns "codesign". -func (s *MacOSSigner) Name() string { - return "codesign" -} - -// Available checks if running on macOS with codesign and identity configured. 
-func (s *MacOSSigner) Available() bool { - if runtime.GOOS != "darwin" { - return false - } - if s.config.Identity == "" { - return false - } - _, err := exec.LookPath("codesign") - return err == nil -} - -// Sign codesigns a binary with hardened runtime. -func (s *MacOSSigner) Sign(ctx context.Context, binary string) error { - if !s.Available() { - return fmt.Errorf("codesign.Sign: codesign not available") - } - - cmd := exec.CommandContext(ctx, "codesign", - "--sign", s.config.Identity, - "--timestamp", - "--options", "runtime", // Hardened runtime for notarization - "--force", - binary, - ) - - output, err := cmd.CombinedOutput() - if err != nil { - return fmt.Errorf("codesign.Sign: %w\nOutput: %s", err, string(output)) - } - - return nil -} - -// Notarize submits binary to Apple for notarization and staples the ticket. -// This blocks until Apple responds (typically 1-5 minutes). -func (s *MacOSSigner) Notarize(ctx context.Context, binary string) error { - if s.config.AppleID == "" || s.config.TeamID == "" || s.config.AppPassword == "" { - return fmt.Errorf("codesign.Notarize: missing Apple credentials (apple_id, team_id, app_password)") - } - - // Create ZIP for submission - zipPath := binary + ".zip" - zipCmd := exec.CommandContext(ctx, "zip", "-j", zipPath, binary) - if output, err := zipCmd.CombinedOutput(); err != nil { - return fmt.Errorf("codesign.Notarize: failed to create zip: %w\nOutput: %s", err, string(output)) - } - defer os.Remove(zipPath) - - // Submit to Apple and wait - submitCmd := exec.CommandContext(ctx, "xcrun", "notarytool", "submit", - zipPath, - "--apple-id", s.config.AppleID, - "--team-id", s.config.TeamID, - "--password", s.config.AppPassword, - "--wait", - ) - if output, err := submitCmd.CombinedOutput(); err != nil { - return fmt.Errorf("codesign.Notarize: notarization failed: %w\nOutput: %s", err, string(output)) - } - - // Staple the ticket - stapleCmd := exec.CommandContext(ctx, "xcrun", "stapler", "staple", binary) - if output, 
err := stapleCmd.CombinedOutput(); err != nil { - return fmt.Errorf("codesign.Notarize: failed to staple: %w\nOutput: %s", err, string(output)) - } - - return nil -} - -// ShouldNotarize returns true if notarization is enabled. -func (s *MacOSSigner) ShouldNotarize() bool { - return s.config.Notarize -} -``` - -**Step 4: Run tests** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/build/signing/... -run TestMacOSSigner -v` -Expected: PASS - -**Step 5: Commit** - -```bash -git add pkg/build/signing/codesign.go pkg/build/signing/codesign_test.go -git commit -m "feat(signing): add macOS codesign + notarization - -Signs binaries with Developer ID and hardened runtime. -Notarization submits to Apple and staples ticket. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 4: Add Windows Placeholder - -**Files:** -- Create: `pkg/build/signing/signtool.go` - -**Step 1: Create placeholder implementation** - -```go -package signing - -import ( - "context" -) - -// WindowsSigner signs binaries using Windows signtool (placeholder). -type WindowsSigner struct { - config WindowsConfig -} - -// Compile-time interface check. -var _ Signer = (*WindowsSigner)(nil) - -// NewWindowsSigner creates a new Windows signer. -func NewWindowsSigner(cfg WindowsConfig) *WindowsSigner { - return &WindowsSigner{config: cfg} -} - -// Name returns "signtool". -func (s *WindowsSigner) Name() string { - return "signtool" -} - -// Available returns false (not yet implemented). -func (s *WindowsSigner) Available() bool { - return false -} - -// Sign is a placeholder that does nothing. 
-func (s *WindowsSigner) Sign(ctx context.Context, binary string) error { - // TODO: Implement Windows signing - return nil -} -``` - -**Step 2: Verify it compiles** - -Run: `cd /Users/snider/Code/Core && go build ./pkg/build/signing/...` -Expected: No errors - -**Step 3: Commit** - -```bash -git add pkg/build/signing/signtool.go -git commit -m "feat(signing): add Windows signtool placeholder - -Placeholder for future Windows code signing support. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 5: Add SignConfig to BuildConfig - -**Files:** -- Modify: `pkg/build/config.go` -- Modify: `pkg/build/config_test.go` - -**Step 1: Add Sign field to BuildConfig** - -In `pkg/build/config.go`, add to the `BuildConfig` struct: - -```go -// Add import -import "github.com/host-uk/core/pkg/build/signing" - -// Add to BuildConfig struct after Targets field: - // Sign contains code signing configuration. - Sign signing.SignConfig `yaml:"sign,omitempty"` -``` - -**Step 2: Update DefaultConfig** - -In `DefaultConfig()`, add: - -```go - Sign: signing.DefaultSignConfig(), -``` - -**Step 3: Update applyDefaults** - -In `applyDefaults()`, add: - -```go - // Expand environment variables in sign config - cfg.Sign.ExpandEnv() -``` - -**Step 4: Add test for sign config loading** - -Add to `pkg/build/config_test.go`: - -```go -func TestLoadConfig_Good_SignConfig(t *testing.T) { - tmpDir := t.TempDir() - coreDir := filepath.Join(tmpDir, ".core") - os.MkdirAll(coreDir, 0755) - - configContent := `version: 1 -sign: - enabled: true - gpg: - key: "ABCD1234" - macos: - identity: "Developer ID Application: Test" - notarize: true -` - os.WriteFile(filepath.Join(coreDir, "build.yaml"), []byte(configContent), 0644) - - cfg, err := LoadConfig(tmpDir) - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - - if !cfg.Sign.Enabled { - t.Error("expected Sign.Enabled to be true") - } - if cfg.Sign.GPG.Key != "ABCD1234" { - t.Errorf("expected GPG.Key 'ABCD1234', got %q", cfg.Sign.GPG.Key) 
- } - if cfg.Sign.MacOS.Identity != "Developer ID Application: Test" { - t.Errorf("expected MacOS.Identity, got %q", cfg.Sign.MacOS.Identity) - } - if !cfg.Sign.MacOS.Notarize { - t.Error("expected MacOS.Notarize to be true") - } -} -``` - -**Step 5: Run tests** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/build/... -run TestLoadConfig -v` -Expected: PASS - -**Step 6: Commit** - -```bash -git add pkg/build/config.go pkg/build/config_test.go -git commit -m "feat(build): add SignConfig to BuildConfig - -Loads signing configuration from .core/build.yaml. -Expands environment variables for secrets. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 6: Create Sign Helper Functions - -**Files:** -- Create: `pkg/build/signing/sign.go` - -**Step 1: Create orchestration helpers** - -```go -package signing - -import ( - "context" - "fmt" - "runtime" - - "github.com/host-uk/core/pkg/build" -) - -// SignBinaries signs macOS binaries in the artifacts list. -// Only signs darwin binaries when running on macOS with a configured identity. -func SignBinaries(ctx context.Context, cfg SignConfig, artifacts []build.Artifact) error { - if !cfg.Enabled { - return nil - } - - // Only sign on macOS - if runtime.GOOS != "darwin" { - return nil - } - - signer := NewMacOSSigner(cfg.MacOS) - if !signer.Available() { - return nil // Silently skip if not configured - } - - for _, artifact := range artifacts { - if artifact.OS != "darwin" { - continue - } - - fmt.Printf(" Signing %s...\n", artifact.Path) - if err := signer.Sign(ctx, artifact.Path); err != nil { - return fmt.Errorf("failed to sign %s: %w", artifact.Path, err) - } - } - - return nil -} - -// NotarizeBinaries notarizes macOS binaries if enabled. 
-func NotarizeBinaries(ctx context.Context, cfg SignConfig, artifacts []build.Artifact) error { - if !cfg.Enabled || !cfg.MacOS.Notarize { - return nil - } - - if runtime.GOOS != "darwin" { - return nil - } - - signer := NewMacOSSigner(cfg.MacOS) - if !signer.Available() { - return fmt.Errorf("notarization requested but codesign not available") - } - - for _, artifact := range artifacts { - if artifact.OS != "darwin" { - continue - } - - fmt.Printf(" Notarizing %s (this may take a few minutes)...\n", artifact.Path) - if err := signer.Notarize(ctx, artifact.Path); err != nil { - return fmt.Errorf("failed to notarize %s: %w", artifact.Path, err) - } - } - - return nil -} - -// SignChecksums signs the checksums file with GPG. -func SignChecksums(ctx context.Context, cfg SignConfig, checksumFile string) error { - if !cfg.Enabled { - return nil - } - - signer := NewGPGSigner(cfg.GPG.Key) - if !signer.Available() { - return nil // Silently skip if not configured - } - - fmt.Printf(" Signing %s with GPG...\n", checksumFile) - if err := signer.Sign(ctx, checksumFile); err != nil { - return fmt.Errorf("failed to sign checksums: %w", err) - } - - return nil -} -``` - -**Step 2: Verify it compiles** - -Run: `cd /Users/snider/Code/Core && go build ./pkg/build/signing/...` -Expected: No errors - -**Step 3: Commit** - -```bash -git add pkg/build/signing/sign.go -git commit -m "feat(signing): add orchestration helpers - -SignBinaries, NotarizeBinaries, SignChecksums for pipeline integration. 
- -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 7: Integrate Signing into CLI - -**Files:** -- Modify: `cmd/core/cmd/build.go` - -**Step 1: Add --no-sign and --notarize flags** - -After the existing flag declarations (around line 74), add: - -```go - var noSign bool - var notarize bool - - buildCmd.BoolFlag("no-sign", "Skip all code signing", &noSign) - buildCmd.BoolFlag("notarize", "Enable macOS notarization (requires Apple credentials)", ¬arize) -``` - -**Step 2: Update runProjectBuild signature** - -Update the function signature and call: - -```go -// Update function signature: -func runProjectBuild(buildType string, ciMode bool, targetsFlag string, outputDir string, doArchive bool, doChecksum bool, configPath string, format string, push bool, imageName string, noSign bool, notarize bool) error { - -// Update the Action call: -buildCmd.Action(func() error { - return runProjectBuild(buildType, ciMode, targets, outputDir, doArchive, doChecksum, configPath, format, push, imageName, noSign, notarize) -}) -``` - -**Step 3: Add signing import** - -Add to imports: - -```go - "github.com/host-uk/core/pkg/build/signing" -``` - -**Step 4: Add signing after build, before archive** - -After the build succeeds (around line 228), add: - -```go - // Sign macOS binaries if enabled - signCfg := buildCfg.Sign - if notarize { - signCfg.MacOS.Notarize = true - } - if noSign { - signCfg.Enabled = false - } - - if signCfg.Enabled && runtime.GOOS == "darwin" { - if !ciMode { - fmt.Println() - fmt.Printf("%s Signing binaries...\n", buildHeaderStyle.Render("Sign:")) - } - - if err := signing.SignBinaries(ctx, signCfg, artifacts); err != nil { - if !ciMode { - fmt.Printf("%s Signing failed: %v\n", buildErrorStyle.Render("Error:"), err) - } - return err - } - - if signCfg.MacOS.Notarize { - if err := signing.NotarizeBinaries(ctx, signCfg, artifacts); err != nil { - if !ciMode { - fmt.Printf("%s Notarization failed: %v\n", buildErrorStyle.Render("Error:"), err) - } - return 
err - } - } - } -``` - -**Step 5: Add GPG signing after checksums** - -After WriteChecksumFile (around line 297), add: - -```go - // Sign checksums with GPG - if signCfg.Enabled { - if err := signing.SignChecksums(ctx, signCfg, checksumPath); err != nil { - if !ciMode { - fmt.Printf("%s GPG signing failed: %v\n", buildErrorStyle.Render("Error:"), err) - } - return err - } - } -``` - -**Step 6: Verify it compiles** - -Run: `cd /Users/snider/Code/Core && go build ./cmd/core/...` -Expected: No errors - -**Step 7: Commit** - -```bash -git add cmd/core/cmd/build.go -git commit -m "feat(cli): integrate signing into build command - -Adds --no-sign and --notarize flags. -Signs macOS binaries after build, GPG signs checksums. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 8: Add Integration Test - -**Files:** -- Create: `pkg/build/signing/signing_test.go` - -**Step 1: Create integration test** - -```go -package signing - -import ( - "context" - "os" - "path/filepath" - "runtime" - "testing" - - "github.com/host-uk/core/pkg/build" -) - -func TestSignBinaries_Good_SkipsNonDarwin(t *testing.T) { - ctx := context.Background() - cfg := SignConfig{ - Enabled: true, - MacOS: MacOSConfig{ - Identity: "Developer ID Application: Test", - }, - } - - // Create fake artifact for linux - artifacts := []build.Artifact{ - {Path: "/tmp/test-binary", OS: "linux", Arch: "amd64"}, - } - - // Should not error even though binary doesn't exist (skips non-darwin) - err := SignBinaries(ctx, cfg, artifacts) - if err != nil { - t.Errorf("unexpected error: %v", err) - } -} - -func TestSignBinaries_Good_DisabledConfig(t *testing.T) { - ctx := context.Background() - cfg := SignConfig{ - Enabled: false, - } - - artifacts := []build.Artifact{ - {Path: "/tmp/test-binary", OS: "darwin", Arch: "arm64"}, - } - - err := SignBinaries(ctx, cfg, artifacts) - if err != nil { - t.Errorf("unexpected error: %v", err) - } -} - -func TestSignChecksums_Good_SkipsNoKey(t *testing.T) { - ctx := 
context.Background() - cfg := SignConfig{ - Enabled: true, - GPG: GPGConfig{ - Key: "", // No key configured - }, - } - - // Should silently skip when no key - err := SignChecksums(ctx, cfg, "/tmp/CHECKSUMS.txt") - if err != nil { - t.Errorf("unexpected error: %v", err) - } -} - -func TestSignChecksums_Good_Disabled(t *testing.T) { - ctx := context.Background() - cfg := SignConfig{ - Enabled: false, - } - - err := SignChecksums(ctx, cfg, "/tmp/CHECKSUMS.txt") - if err != nil { - t.Errorf("unexpected error: %v", err) - } -} -``` - -**Step 2: Run all signing tests** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/build/signing/... -v` -Expected: All tests pass - -**Step 3: Commit** - -```bash -git add pkg/build/signing/signing_test.go -git commit -m "test(signing): add integration tests - -Tests for skip conditions and disabled configs. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 9: Update TODO.md and Final Verification - -**Step 1: Build CLI** - -Run: `cd /Users/snider/Code/Core && go build -o bin/core ./cmd/core` -Expected: No errors - -**Step 2: Test help output** - -Run: `./bin/core build --help` -Expected: Shows --no-sign and --notarize flags - -**Step 3: Run all tests** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/build/... -v` -Expected: All tests pass - -**Step 4: Update TODO.md** - -Mark S3.3 tasks as complete in `tasks/TODO.md`: - -```markdown -### S3.3 Code Signing (Standard) ✅ -- [x] macOS codesign integration -- [x] macOS notarization -- [ ] Windows signtool integration (placeholder added) -- [x] GPG signing (standard tools) -``` - -**Step 5: Final commit** - -```bash -git add tasks/TODO.md -git commit -m "chore(signing): finalize S3.3 code signing - -Implemented: -- GPG signing of CHECKSUMS.txt -- macOS codesign with hardened runtime -- macOS notarization via notarytool -- Windows signtool placeholder - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -## Summary - -9 tasks covering: -1. 
Signing package structure (Signer interface, SignConfig) -2. GPG signer implementation -3. macOS codesign + notarization -4. Windows signtool placeholder -5. Add SignConfig to BuildConfig -6. Orchestration helpers (SignBinaries, SignChecksums) -7. CLI integration (--no-sign, --notarize) -8. Integration tests -9. Final verification and TODO update diff --git a/tasks/plans/2026-01-29-core-devops-design.md b/tasks/plans/2026-01-29-core-devops-design.md deleted file mode 100644 index 1b66e67..0000000 --- a/tasks/plans/2026-01-29-core-devops-design.md +++ /dev/null @@ -1,306 +0,0 @@ -# Core DevOps CLI Design (S4.6) - -## Summary - -Portable development environment CLI commands for the core-devops LinuxKit image. Provides a sandboxed, immutable environment with 100+ embedded tools. - -## Design Decisions - -- **Image sources**: GitHub Releases + Container Registry + CDN (try in order, configurable) -- **Local storage**: `~/.core/images/` with `CORE_IMAGES_DIR` env override -- **Shell connection**: SSH by default, `--console` for serial fallback -- **Serve**: Mount PWD into VM via 9P/SSHFS, run auto-detected dev server -- **Test**: Auto-detect framework + `.core/test.yaml` config + `--` override -- **Update**: Simple hash/version check, `--force` to always download -- **Claude sandbox**: SSH in with forwarded auth, safe experimentation in immutable image - -## Package Structure - -``` -pkg/devops/ -├── devops.go # DevOps struct, Boot/Stop/Status -├── images.go # ImageManager, manifest handling -├── mount.go # Directory mounting (9P, SSHFS) -├── serve.go # Project detection, serve command -├── test.go # Test detection, .core/test.yaml parsing -├── config.go # ~/.core/config.yaml handling -└── sources/ - ├── source.go # ImageSource interface - ├── github.go # GitHub Releases - ├── registry.go # Container registry - └── cdn.go # CDN/S3 - -cmd/core/cmd/dev.go # CLI commands -``` - -## Image Storage - -``` -~/.core/ -├── config.yaml # Global config (image source preference, 
etc.) -└── images/ - ├── core-devops-darwin-arm64.qcow2 - ├── core-devops-darwin-amd64.qcow2 - ├── core-devops-linux-amd64.qcow2 - └── manifest.json # Tracks versions, hashes, last-updated -``` - -## ImageSource Interface - -```go -type ImageSource interface { - Name() string - Available() bool - LatestVersion() (string, error) - Download(ctx context.Context, dest string) error -} -``` - -Sources tried in order: GitHub → Registry → CDN, or respect user preference in config. - -## CLI Commands - -```go -// cmd/core/cmd/dev.go - -func AddDevCommand(app *clir.Cli) { - devCmd := app.NewSubCommand("dev", "Portable development environment") - - // core dev install [--source github|registry|cdn] - // Downloads core-devops image for current platform - - // core dev boot [--memory 4096] [--cpus 4] [--name mydev] - // Boots the dev environment (detached by default) - - // core dev shell [--console] - // SSH into running dev env (or serial console with --console) - - // core dev serve [--port 8000] - // Mount PWD → /app, run FrankenPHP, forward port - - // core dev test [-- custom command] - // Auto-detect tests or use .core/test.yaml or pass custom - - // core dev claude [--auth] [--model opus|sonnet] - // SSH in with forwarded auth, start Claude in sandbox - - // core dev update [--force] - // Check for newer image, download if available - - // core dev status - // Show if dev env is running, resource usage, ports - - // core dev stop - // Stop the running dev environment -} -``` - -## Command Flow - -``` -First time: - core dev install → Downloads ~/.core/images/core-devops-{os}-{arch}.qcow2 - core dev boot → Starts VM in background - core dev shell → SSH in - -Daily use: - core dev boot → Start (if not running) - core dev serve → Mount project, start server - core dev test → Run tests inside VM - core dev shell → Interactive work - -AI sandbox: - core dev claude → SSH + forward auth + start Claude CLI - -Maintenance: - core dev update → Get latest image - core dev status 
→ Check what's running -``` - -## `core dev claude` - Sandboxed AI Session - -```bash -core dev claude # Forward all auth by default -core dev claude --no-auth # Clean session, no host credentials -core dev claude --auth=gh,anthropic # Selective forwarding -``` - -**What it does:** -1. Ensures dev VM is running (auto-boots if not) -2. Forwards auth credentials from host: - - `~/.anthropic/` or `ANTHROPIC_API_KEY` - - `~/.config/gh/` (GitHub CLI auth) - - SSH agent forwarding - - Git config (name, email) -3. SSHs into VM with agent forwarding (`ssh -A`) -4. Starts `claude` CLI inside with forwarded context -5. Current project mounted at `/app` - -**Why this is powerful:** -- Immutable base = reset anytime with `core dev boot --fresh` -- Claude can experiment freely, install packages, make mistakes -- Host system untouched -- Still has real credentials to push code, create PRs -- Full 100+ tools available in core-devops image - -## Test Configuration - -**`.core/test.yaml` format:** -```yaml -version: 1 - -# Commands to run (in order) -commands: - - name: unit - run: vendor/bin/pest --parallel - - name: types - run: vendor/bin/phpstan analyse - - name: lint - run: vendor/bin/pint --test - -# Or simple single command -command: npm test - -# Environment variables -env: - APP_ENV: testing - DB_CONNECTION: sqlite -``` - -**Auto-Detection Priority:** -1. `.core/test.yaml` -2. `composer.json` scripts.test → `composer test` -3. `package.json` scripts.test → `npm test` -4. `go.mod` → `go test ./...` -5. `pytest.ini` or `pyproject.toml` → `pytest` -6. `Taskfile.yaml` → `task test` - -**CLI Usage:** -```bash -core dev test # Auto-detect and run -core dev test --unit # Run only "unit" from .core/test.yaml -core dev test -- go test -v ./pkg/... # Override with custom -``` - -## `core dev serve` - Mount & Serve - -**How it works:** -1. Ensure VM is running -2. Mount current directory into VM via 9P virtio-fs (or SSHFS fallback) -3. 
Start auto-detected dev server on /app inside VM -4. Forward port to host - -**Mount Strategy:** -```go -type MountMethod int -const ( - Mount9P MountMethod = iota // QEMU virtio-9p (faster) - MountSSHFS // sshfs reverse mount - MountRSync // Fallback: rsync on change -) -``` - -**CLI Usage:** -```bash -core dev serve # Mount PWD, serve on :8000 -core dev serve --port 3000 # Custom port -core dev serve --path ./backend # Serve subdirectory -``` - -**Project Detection:** -```go -func detectServeCommand(projectDir string) string { - if exists("artisan") { - return "php artisan octane:start --host=0.0.0.0 --port=8000" - } - if exists("package.json") && hasScript("dev") { - return "npm run dev -- --host 0.0.0.0" - } - if exists("composer.json") { - return "frankenphp php-server" - } - return "python -m http.server 8000" // Fallback -} -``` - -## Image Sources & Updates - -**~/.core/config.yaml:** -```yaml -version: 1 - -images: - source: auto # auto | github | registry | cdn - - cdn: - url: https://images.example.com/core-devops - - github: - repo: host-uk/core-images - - registry: - image: ghcr.io/host-uk/core-devops -``` - -**Manifest for Update Checking:** -```json -// ~/.core/images/manifest.json -{ - "core-devops-darwin-arm64.qcow2": { - "version": "v1.2.0", - "sha256": "abc123...", - "downloaded": "2026-01-29T10:00:00Z", - "source": "github" - } -} -``` - -**Update Flow:** -```go -func (d *DevOps) Update(force bool) error { - local := d.manifest.Get(imageName) - remote, _ := d.source.LatestVersion() - - if force || local.Version != remote { - fmt.Printf("Updating %s → %s\n", local.Version, remote) - return d.source.Download(ctx, imagePath) - } - fmt.Println("Already up to date") - return nil -} -``` - -## Commands Summary - -| Command | Description | -|---------|-------------| -| `core dev install` | Download image for platform | -| `core dev boot` | Start VM (auto-installs if needed) | -| `core dev shell` | SSH in (--console for serial) | -| `core dev serve` | 
Mount PWD, run dev server | -| `core dev test` | Run tests inside VM | -| `core dev claude` | Start Claude session in sandbox | -| `core dev update` | Check/download newer image | -| `core dev status` | Show VM state, ports, resources | -| `core dev stop` | Stop the VM | - -## Dependencies - -- Reuse existing `pkg/container` for VM management (LinuxKitManager) -- SSH client for shell/exec (golang.org/x/crypto/ssh) -- Progress bar for downloads (charmbracelet/bubbles or similar) - -## Implementation Steps - -1. Create `pkg/devops/` package structure -2. Implement ImageSource interface and sources (GitHub, Registry, CDN) -3. Implement image download with manifest tracking -4. Implement config loading (`~/.core/config.yaml`) -5. Add CLI commands to `cmd/core/cmd/dev.go` -6. Implement boot/stop using existing LinuxKitManager -7. Implement shell (SSH + serial console) -8. Implement serve (mount + project detection) -9. Implement test (detection + .core/test.yaml) -10. Implement claude (auth forwarding + sandbox) -11. Implement update (version check + download) -12. Implement status diff --git a/tasks/plans/2026-01-29-core-devops-impl.md b/tasks/plans/2026-01-29-core-devops-impl.md deleted file mode 100644 index e1b08d0..0000000 --- a/tasks/plans/2026-01-29-core-devops-impl.md +++ /dev/null @@ -1,2183 +0,0 @@ -# Core DevOps CLI Implementation Plan - -> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. - -**Goal:** Implement `core dev` commands for portable development environment using core-devops LinuxKit images. - -**Architecture:** `pkg/devops` package handles image management, config, and orchestration. Reuses `pkg/container.LinuxKitManager` for VM lifecycle. Image sources (GitHub, Registry, CDN) implement common interface. CLI in `cmd/core/cmd/dev.go`. 
- -**Tech Stack:** Go, pkg/container, golang.org/x/crypto/ssh, os/exec for gh CLI, YAML config - ---- - -### Task 1: Create DevOps Package Structure - -**Files:** -- Create: `pkg/devops/devops.go` -- Create: `pkg/devops/go.mod` - -**Step 1: Create go.mod** - -```go -module github.com/host-uk/core/pkg/devops - -go 1.25 - -require ( - github.com/host-uk/core/pkg/container v0.0.0 - golang.org/x/crypto v0.32.0 - gopkg.in/yaml.v3 v3.0.1 -) - -replace github.com/host-uk/core/pkg/container => ../container -``` - -**Step 2: Create devops.go with core types** - -```go -// Package devops provides a portable development environment using LinuxKit images. -package devops - -import ( - "context" - "fmt" - "os" - "path/filepath" - "runtime" - - "github.com/host-uk/core/pkg/container" -) - -// DevOps manages the portable development environment. -type DevOps struct { - config *Config - images *ImageManager - container *container.LinuxKitManager -} - -// New creates a new DevOps instance. -func New() (*DevOps, error) { - cfg, err := LoadConfig() - if err != nil { - return nil, fmt.Errorf("devops.New: failed to load config: %w", err) - } - - images, err := NewImageManager(cfg) - if err != nil { - return nil, fmt.Errorf("devops.New: failed to create image manager: %w", err) - } - - mgr, err := container.NewLinuxKitManager() - if err != nil { - return nil, fmt.Errorf("devops.New: failed to create container manager: %w", err) - } - - return &DevOps{ - config: cfg, - images: images, - container: mgr, - }, nil -} - -// ImageName returns the platform-specific image name. -func ImageName() string { - return fmt.Sprintf("core-devops-%s-%s.qcow2", runtime.GOOS, runtime.GOARCH) -} - -// ImagesDir returns the path to the images directory. 
-func ImagesDir() (string, error) { - if dir := os.Getenv("CORE_IMAGES_DIR"); dir != "" { - return dir, nil - } - home, err := os.UserHomeDir() - if err != nil { - return "", err - } - return filepath.Join(home, ".core", "images"), nil -} - -// ImagePath returns the full path to the platform-specific image. -func ImagePath() (string, error) { - dir, err := ImagesDir() - if err != nil { - return "", err - } - return filepath.Join(dir, ImageName()), nil -} - -// IsInstalled checks if the dev image is installed. -func (d *DevOps) IsInstalled() bool { - path, err := ImagePath() - if err != nil { - return false - } - _, err = os.Stat(path) - return err == nil -} -``` - -**Step 3: Add to go.work** - -Run: `cd /Users/snider/Code/Core && echo " ./pkg/devops" >> go.work && go work sync` - -**Step 4: Verify it compiles** - -Run: `cd /Users/snider/Code/Core && go build ./pkg/devops/...` -Expected: Error (missing Config, ImageManager) - that's OK for now - -**Step 5: Commit** - -```bash -git add pkg/devops/ -git add go.work go.work.sum -git commit -m "feat(devops): add package structure - -Initial pkg/devops setup with DevOps type and path helpers. 
- -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 2: Implement Config Loading - -**Files:** -- Create: `pkg/devops/config.go` -- Create: `pkg/devops/config_test.go` - -**Step 1: Write the failing test** - -```go -package devops - -import ( - "os" - "path/filepath" - "testing" -) - -func TestLoadConfig_Good_Default(t *testing.T) { - // Use temp home dir - tmpDir := t.TempDir() - t.Setenv("HOME", tmpDir) - - cfg, err := LoadConfig() - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - if cfg.Images.Source != "auto" { - t.Errorf("expected source 'auto', got %q", cfg.Images.Source) - } -} - -func TestLoadConfig_Good_FromFile(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("HOME", tmpDir) - - configDir := filepath.Join(tmpDir, ".core") - os.MkdirAll(configDir, 0755) - - configContent := `version: 1 -images: - source: github - github: - repo: myorg/images -` - os.WriteFile(filepath.Join(configDir, "config.yaml"), []byte(configContent), 0644) - - cfg, err := LoadConfig() - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - if cfg.Images.Source != "github" { - t.Errorf("expected source 'github', got %q", cfg.Images.Source) - } - if cfg.Images.GitHub.Repo != "myorg/images" { - t.Errorf("expected repo 'myorg/images', got %q", cfg.Images.GitHub.Repo) - } -} -``` - -**Step 2: Run test to verify it fails** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/devops/... -run TestLoadConfig -v` -Expected: FAIL (LoadConfig not defined) - -**Step 3: Write implementation** - -```go -package devops - -import ( - "os" - "path/filepath" - - "gopkg.in/yaml.v3" -) - -// Config holds global devops configuration from ~/.core/config.yaml. -type Config struct { - Version int `yaml:"version"` - Images ImagesConfig `yaml:"images"` -} - -// ImagesConfig holds image source configuration. 
-type ImagesConfig struct { - Source string `yaml:"source"` // auto, github, registry, cdn - GitHub GitHubConfig `yaml:"github,omitempty"` - Registry RegistryConfig `yaml:"registry,omitempty"` - CDN CDNConfig `yaml:"cdn,omitempty"` -} - -// GitHubConfig holds GitHub Releases configuration. -type GitHubConfig struct { - Repo string `yaml:"repo"` // owner/repo format -} - -// RegistryConfig holds container registry configuration. -type RegistryConfig struct { - Image string `yaml:"image"` // e.g., ghcr.io/host-uk/core-devops -} - -// CDNConfig holds CDN/S3 configuration. -type CDNConfig struct { - URL string `yaml:"url"` // base URL for downloads -} - -// DefaultConfig returns sensible defaults. -func DefaultConfig() *Config { - return &Config{ - Version: 1, - Images: ImagesConfig{ - Source: "auto", - GitHub: GitHubConfig{ - Repo: "host-uk/core-images", - }, - Registry: RegistryConfig{ - Image: "ghcr.io/host-uk/core-devops", - }, - }, - } -} - -// ConfigPath returns the path to the config file. -func ConfigPath() (string, error) { - home, err := os.UserHomeDir() - if err != nil { - return "", err - } - return filepath.Join(home, ".core", "config.yaml"), nil -} - -// LoadConfig loads configuration from ~/.core/config.yaml. -// Returns default config if file doesn't exist. -func LoadConfig() (*Config, error) { - configPath, err := ConfigPath() - if err != nil { - return DefaultConfig(), nil - } - - data, err := os.ReadFile(configPath) - if err != nil { - if os.IsNotExist(err) { - return DefaultConfig(), nil - } - return nil, err - } - - cfg := DefaultConfig() - if err := yaml.Unmarshal(data, cfg); err != nil { - return nil, err - } - - return cfg, nil -} -``` - -**Step 4: Run tests** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/devops/... 
-run TestLoadConfig -v` -Expected: PASS - -**Step 5: Commit** - -```bash -git add pkg/devops/config.go pkg/devops/config_test.go -git commit -m "feat(devops): add config loading - -Loads ~/.core/config.yaml with image source preferences. -Defaults to auto-detection with host-uk/core-images. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 3: Implement ImageSource Interface - -**Files:** -- Create: `pkg/devops/sources/source.go` - -**Step 1: Create source interface** - -```go -// Package sources provides image download sources for core-devops. -package sources - -import ( - "context" -) - -// ImageSource defines the interface for downloading dev images. -type ImageSource interface { - // Name returns the source identifier. - Name() string - // Available checks if this source can be used. - Available() bool - // LatestVersion returns the latest available version. - LatestVersion(ctx context.Context) (string, error) - // Download downloads the image to the destination path. - // Reports progress via the callback if provided. - Download(ctx context.Context, dest string, progress func(downloaded, total int64)) error -} - -// SourceConfig holds configuration for a source. -type SourceConfig struct { - // GitHub configuration - GitHubRepo string - // Registry configuration - RegistryImage string - // CDN configuration - CDNURL string - // Image name (e.g., core-devops-darwin-arm64.qcow2) - ImageName string -} -``` - -**Step 2: Verify it compiles** - -Run: `cd /Users/snider/Code/Core && go build ./pkg/devops/...` -Expected: No errors - -**Step 3: Commit** - -```bash -git add pkg/devops/sources/source.go -git commit -m "feat(devops): add ImageSource interface - -Defines common interface for GitHub, Registry, and CDN sources. 
- -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 4: Implement GitHub Source - -**Files:** -- Create: `pkg/devops/sources/github.go` -- Create: `pkg/devops/sources/github_test.go` - -**Step 1: Write the failing test** - -```go -package sources - -import ( - "testing" -) - -func TestGitHubSource_Good_Available(t *testing.T) { - src := NewGitHubSource(SourceConfig{ - GitHubRepo: "host-uk/core-images", - ImageName: "core-devops-darwin-arm64.qcow2", - }) - - if src.Name() != "github" { - t.Errorf("expected name 'github', got %q", src.Name()) - } - - // Available depends on gh CLI being installed - _ = src.Available() -} -``` - -**Step 2: Run test to verify it fails** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/devops/sources/... -run TestGitHubSource -v` -Expected: FAIL - -**Step 3: Write implementation** - -```go -package sources - -import ( - "context" - "encoding/json" - "fmt" - "os" - "os/exec" - "strings" -) - -// GitHubSource downloads images from GitHub Releases. -type GitHubSource struct { - config SourceConfig -} - -// NewGitHubSource creates a new GitHub source. -func NewGitHubSource(cfg SourceConfig) *GitHubSource { - return &GitHubSource{config: cfg} -} - -// Name returns "github". -func (s *GitHubSource) Name() string { - return "github" -} - -// Available checks if gh CLI is installed and authenticated. -func (s *GitHubSource) Available() bool { - _, err := exec.LookPath("gh") - if err != nil { - return false - } - // Check if authenticated - cmd := exec.Command("gh", "auth", "status") - return cmd.Run() == nil -} - -// LatestVersion returns the latest release tag. 
-func (s *GitHubSource) LatestVersion(ctx context.Context) (string, error) { - cmd := exec.CommandContext(ctx, "gh", "release", "view", - "-R", s.config.GitHubRepo, - "--json", "tagName", - "-q", ".tagName", - ) - out, err := cmd.Output() - if err != nil { - return "", fmt.Errorf("github.LatestVersion: %w", err) - } - return strings.TrimSpace(string(out)), nil -} - -// Download downloads the image from the latest release. -func (s *GitHubSource) Download(ctx context.Context, dest string, progress func(downloaded, total int64)) error { - // Get release assets to find our image - cmd := exec.CommandContext(ctx, "gh", "release", "download", - "-R", s.config.GitHubRepo, - "-p", s.config.ImageName, - "-D", dest, - "--clobber", - ) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - if err := cmd.Run(); err != nil { - return fmt.Errorf("github.Download: %w", err) - } - return nil -} - -// releaseAsset represents a GitHub release asset. -type releaseAsset struct { - Name string `json:"name"` - Size int64 `json:"size"` - URL string `json:"url"` -} -``` - -**Step 4: Run tests** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/devops/sources/... -run TestGitHubSource -v` -Expected: PASS - -**Step 5: Commit** - -```bash -git add pkg/devops/sources/github.go pkg/devops/sources/github_test.go -git commit -m "feat(devops): add GitHub Releases source - -Downloads core-devops images from GitHub Releases using gh CLI. 
- -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 5: Implement CDN Source - -**Files:** -- Create: `pkg/devops/sources/cdn.go` -- Create: `pkg/devops/sources/cdn_test.go` - -**Step 1: Write the failing test** - -```go -package sources - -import ( - "testing" -) - -func TestCDNSource_Good_Available(t *testing.T) { - src := NewCDNSource(SourceConfig{ - CDNURL: "https://images.example.com", - ImageName: "core-devops-darwin-arm64.qcow2", - }) - - if src.Name() != "cdn" { - t.Errorf("expected name 'cdn', got %q", src.Name()) - } - - // CDN is available if URL is configured - if !src.Available() { - t.Error("expected Available() to be true when URL is set") - } -} - -func TestCDNSource_Bad_NoURL(t *testing.T) { - src := NewCDNSource(SourceConfig{ - ImageName: "core-devops-darwin-arm64.qcow2", - }) - - if src.Available() { - t.Error("expected Available() to be false when URL is empty") - } -} -``` - -**Step 2: Run test to verify it fails** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/devops/sources/... -run TestCDNSource -v` -Expected: FAIL - -**Step 3: Write implementation** - -```go -package sources - -import ( - "context" - "fmt" - "io" - "net/http" - "os" - "path/filepath" -) - -// CDNSource downloads images from a CDN or S3 bucket. -type CDNSource struct { - config SourceConfig -} - -// NewCDNSource creates a new CDN source. -func NewCDNSource(cfg SourceConfig) *CDNSource { - return &CDNSource{config: cfg} -} - -// Name returns "cdn". -func (s *CDNSource) Name() string { - return "cdn" -} - -// Available checks if CDN URL is configured. -func (s *CDNSource) Available() bool { - return s.config.CDNURL != "" -} - -// LatestVersion fetches version from manifest or returns "latest". 
-func (s *CDNSource) LatestVersion(ctx context.Context) (string, error) { - // Try to fetch manifest.json for version info - url := fmt.Sprintf("%s/manifest.json", s.config.CDNURL) - req, err := http.NewRequestWithContext(ctx, "GET", url, nil) - if err != nil { - return "latest", nil - } - - resp, err := http.DefaultClient.Do(req) - if err != nil { - return "latest", nil - } - defer resp.Body.Close() - if resp.StatusCode != 200 { - return "latest", nil - } - - // For now, just return latest - could parse manifest for version - return "latest", nil -} - -// Download downloads the image from CDN. -func (s *CDNSource) Download(ctx context.Context, dest string, progress func(downloaded, total int64)) error { - url := fmt.Sprintf("%s/%s", s.config.CDNURL, s.config.ImageName) - - req, err := http.NewRequestWithContext(ctx, "GET", url, nil) - if err != nil { - return fmt.Errorf("cdn.Download: %w", err) - } - - resp, err := http.DefaultClient.Do(req) - if err != nil { - return fmt.Errorf("cdn.Download: %w", err) - } - defer resp.Body.Close() - - if resp.StatusCode != 200 { - return fmt.Errorf("cdn.Download: HTTP %d", resp.StatusCode) - } - - // Ensure dest directory exists (dest is the target directory itself, not a file path) - if err := os.MkdirAll(dest, 0755); err != nil { - return fmt.Errorf("cdn.Download: %w", err) - } - - // Create destination file - destPath := filepath.Join(dest, s.config.ImageName) - f, err := os.Create(destPath) - if err != nil { - return fmt.Errorf("cdn.Download: %w", err) - } - defer f.Close() - - // Copy with progress - total := resp.ContentLength - var downloaded int64 - - buf := make([]byte, 32*1024) - for { - n, err := resp.Body.Read(buf) - if n > 0 { - if _, werr := f.Write(buf[:n]); werr != nil { - return fmt.Errorf("cdn.Download: %w", werr) - } - downloaded += int64(n) - if progress != nil { - progress(downloaded, total) - } - } - if err == io.EOF { - break - } - if err != nil { - return fmt.Errorf("cdn.Download: %w", err) - } - } - - return nil -} -``` - -**Step 4: Run tests** - -Run: `cd 
/Users/snider/Code/Core && go test ./pkg/devops/sources/... -run TestCDNSource -v` -Expected: PASS - -**Step 5: Commit** - -```bash -git add pkg/devops/sources/cdn.go pkg/devops/sources/cdn_test.go -git commit -m "feat(devops): add CDN/S3 source - -Downloads core-devops images from custom CDN with progress reporting. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 6: Implement ImageManager - -**Files:** -- Create: `pkg/devops/images.go` -- Create: `pkg/devops/images_test.go` - -**Step 1: Write the failing test** - -```go -package devops - -import ( - "os" - "path/filepath" - "testing" -) - -func TestImageManager_Good_IsInstalled(t *testing.T) { - tmpDir := t.TempDir() - t.Setenv("CORE_IMAGES_DIR", tmpDir) - - cfg := DefaultConfig() - mgr, err := NewImageManager(cfg) - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - - // Not installed yet - if mgr.IsInstalled() { - t.Error("expected IsInstalled() to be false") - } - - // Create fake image - imagePath := filepath.Join(tmpDir, ImageName()) - os.WriteFile(imagePath, []byte("fake"), 0644) - - // Now installed - if !mgr.IsInstalled() { - t.Error("expected IsInstalled() to be true") - } -} -``` - -**Step 2: Run test to verify it fails** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/devops/... -run TestImageManager -v` -Expected: FAIL - -**Step 3: Write implementation** - -```go -package devops - -import ( - "context" - "encoding/json" - "fmt" - "os" - "path/filepath" - "time" - - "github.com/host-uk/core/pkg/devops/sources" -) - -// ImageManager handles image downloads and updates. -type ImageManager struct { - config *Config - manifest *Manifest - sources []sources.ImageSource -} - -// Manifest tracks installed images. -type Manifest struct { - Images map[string]ImageInfo `json:"images"` - path string -} - -// ImageInfo holds metadata about an installed image. 
-type ImageInfo struct { - Version string `json:"version"` - SHA256 string `json:"sha256,omitempty"` - Downloaded time.Time `json:"downloaded"` - Source string `json:"source"` -} - -// NewImageManager creates a new image manager. -func NewImageManager(cfg *Config) (*ImageManager, error) { - imagesDir, err := ImagesDir() - if err != nil { - return nil, err - } - - // Ensure images directory exists - if err := os.MkdirAll(imagesDir, 0755); err != nil { - return nil, err - } - - // Load or create manifest - manifestPath := filepath.Join(imagesDir, "manifest.json") - manifest, err := loadManifest(manifestPath) - if err != nil { - return nil, err - } - - // Build source list based on config - imageName := ImageName() - sourceCfg := sources.SourceConfig{ - GitHubRepo: cfg.Images.GitHub.Repo, - RegistryImage: cfg.Images.Registry.Image, - CDNURL: cfg.Images.CDN.URL, - ImageName: imageName, - } - - var srcs []sources.ImageSource - switch cfg.Images.Source { - case "github": - srcs = []sources.ImageSource{sources.NewGitHubSource(sourceCfg)} - case "cdn": - srcs = []sources.ImageSource{sources.NewCDNSource(sourceCfg)} - default: // "auto" - srcs = []sources.ImageSource{ - sources.NewGitHubSource(sourceCfg), - sources.NewCDNSource(sourceCfg), - } - } - - return &ImageManager{ - config: cfg, - manifest: manifest, - sources: srcs, - }, nil -} - -// IsInstalled checks if the dev image is installed. -func (m *ImageManager) IsInstalled() bool { - path, err := ImagePath() - if err != nil { - return false - } - _, err = os.Stat(path) - return err == nil -} - -// Install downloads and installs the dev image. 
-func (m *ImageManager) Install(ctx context.Context, progress func(downloaded, total int64)) error { - imagesDir, err := ImagesDir() - if err != nil { - return err - } - - // Find first available source - var src sources.ImageSource - for _, s := range m.sources { - if s.Available() { - src = s - break - } - } - if src == nil { - return fmt.Errorf("no image source available") - } - - // Get version - version, err := src.LatestVersion(ctx) - if err != nil { - return fmt.Errorf("failed to get latest version: %w", err) - } - - fmt.Printf("Downloading %s from %s...\n", ImageName(), src.Name()) - - // Download - if err := src.Download(ctx, imagesDir, progress); err != nil { - return err - } - - // Update manifest - m.manifest.Images[ImageName()] = ImageInfo{ - Version: version, - Downloaded: time.Now(), - Source: src.Name(), - } - - return m.manifest.Save() -} - -// CheckUpdate checks if an update is available. -func (m *ImageManager) CheckUpdate(ctx context.Context) (current, latest string, hasUpdate bool, err error) { - info, ok := m.manifest.Images[ImageName()] - if !ok { - return "", "", false, fmt.Errorf("image not installed") - } - current = info.Version - - // Find first available source - var src sources.ImageSource - for _, s := range m.sources { - if s.Available() { - src = s - break - } - } - if src == nil { - return current, "", false, fmt.Errorf("no image source available") - } - - latest, err = src.LatestVersion(ctx) - if err != nil { - return current, "", false, err - } - - hasUpdate = current != latest - return current, latest, hasUpdate, nil -} - -func loadManifest(path string) (*Manifest, error) { - m := &Manifest{ - Images: make(map[string]ImageInfo), - path: path, - } - - data, err := os.ReadFile(path) - if err != nil { - if os.IsNotExist(err) { - return m, nil - } - return nil, err - } - - if err := json.Unmarshal(data, m); err != nil { - return nil, err - } - m.path = path - - return m, nil -} - -// Save writes the manifest to disk. 
-func (m *Manifest) Save() error { - data, err := json.MarshalIndent(m, "", " ") - if err != nil { - return err - } - return os.WriteFile(m.path, data, 0644) -} -``` - -**Step 4: Run tests** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/devops/... -run TestImageManager -v` -Expected: PASS - -**Step 5: Commit** - -```bash -git add pkg/devops/images.go pkg/devops/images_test.go -git commit -m "feat(devops): add ImageManager - -Manages image downloads, manifest tracking, and update checking. -Tries sources in priority order (GitHub, CDN). - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 7: Implement Boot/Stop/Status - -**Files:** -- Modify: `pkg/devops/devops.go` -- Create: `pkg/devops/devops_test.go` - -**Step 1: Add boot/stop/status methods to devops.go** - -```go -// Add to devops.go - -// BootOptions configures how to boot the dev environment. -type BootOptions struct { - Memory int // MB, default 4096 - CPUs int // default 2 - Name string // container name - Fresh bool // destroy existing and start fresh -} - -// DefaultBootOptions returns sensible defaults. -func DefaultBootOptions() BootOptions { - return BootOptions{ - Memory: 4096, - CPUs: 2, - Name: "core-dev", - } -} - -// Boot starts the dev environment. 
-func (d *DevOps) Boot(ctx context.Context, opts BootOptions) error { - if !d.images.IsInstalled() { - return fmt.Errorf("dev image not installed (run 'core dev install' first)") - } - - // Check if already running - if !opts.Fresh { - running, err := d.IsRunning(ctx) - if err == nil && running { - return fmt.Errorf("dev environment already running (use 'core dev stop' first or --fresh)") - } - } - - // Stop existing if fresh - if opts.Fresh { - _ = d.Stop(ctx) - } - - imagePath, err := ImagePath() - if err != nil { - return err - } - - runOpts := container.RunOptions{ - Name: opts.Name, - Detach: true, - Memory: opts.Memory, - CPUs: opts.CPUs, - SSHPort: 2222, - } - - _, err = d.container.Run(ctx, imagePath, runOpts) - return err -} - -// Stop stops the dev environment. -func (d *DevOps) Stop(ctx context.Context) error { - containers, err := d.container.List(ctx) - if err != nil { - return err - } - - for _, c := range containers { - if c.Name == "core-dev" && c.Status == container.StatusRunning { - return d.container.Stop(ctx, c.ID) - } - } - - return nil -} - -// IsRunning checks if the dev environment is running. -func (d *DevOps) IsRunning(ctx context.Context) (bool, error) { - containers, err := d.container.List(ctx) - if err != nil { - return false, err - } - - for _, c := range containers { - if c.Name == "core-dev" && c.Status == container.StatusRunning { - return true, nil - } - } - - return false, nil -} - -// Status returns information about the dev environment. -type DevStatus struct { - Installed bool - Running bool - ImageVersion string - ContainerID string - Memory int - CPUs int - SSHPort int - Uptime time.Duration -} - -// Status returns the current dev environment status. 
-func (d *DevOps) Status(ctx context.Context) (*DevStatus, error) { - status := &DevStatus{ - Installed: d.images.IsInstalled(), - } - - if info, ok := d.images.manifest.Images[ImageName()]; ok { - status.ImageVersion = info.Version - } - - containers, err := d.container.List(ctx) - if err != nil { - return status, nil - } - - for _, c := range containers { - if c.Name == "core-dev" && c.Status == container.StatusRunning { - status.Running = true - status.ContainerID = c.ID - status.Memory = c.Memory - status.CPUs = c.CPUs - status.SSHPort = 2222 - status.Uptime = time.Since(c.StartedAt) - break - } - } - - return status, nil -} -``` - -**Step 2: Add missing import to devops.go** - -```go -import ( - "time" - // ... other imports -) -``` - -**Step 3: Verify it compiles** - -Run: `cd /Users/snider/Code/Core && go build ./pkg/devops/...` -Expected: No errors - -**Step 4: Commit** - -```bash -git add pkg/devops/devops.go -git commit -m "feat(devops): add Boot/Stop/Status methods - -Manages dev VM lifecycle using LinuxKitManager. -Supports fresh boot, status checking, graceful stop. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 8: Implement Shell Command - -**Files:** -- Create: `pkg/devops/shell.go` - -**Step 1: Create shell.go** - -```go -package devops - -import ( - "context" - "fmt" - "os" - "os/exec" -) - -// ShellOptions configures the shell connection. -type ShellOptions struct { - Console bool // Use serial console instead of SSH - Command []string // Command to run (empty = interactive shell) -} - -// Shell connects to the dev environment. -func (d *DevOps) Shell(ctx context.Context, opts ShellOptions) error { - running, err := d.IsRunning(ctx) - if err != nil { - return err - } - if !running { - return fmt.Errorf("dev environment not running (run 'core dev boot' first)") - } - - if opts.Console { - return d.serialConsole(ctx) - } - - return d.sshShell(ctx, opts.Command) -} - -// sshShell connects via SSH. 
-func (d *DevOps) sshShell(ctx context.Context, command []string) error { - args := []string{ - "-o", "StrictHostKeyChecking=no", - "-o", "UserKnownHostsFile=/dev/null", - "-o", "LogLevel=ERROR", - "-A", // Agent forwarding - "-p", "2222", - "root@localhost", - } - - if len(command) > 0 { - args = append(args, command...) - } - - cmd := exec.CommandContext(ctx, "ssh", args...) - cmd.Stdin = os.Stdin - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - return cmd.Run() -} - -// serialConsole attaches to the QEMU serial console. -func (d *DevOps) serialConsole(ctx context.Context) error { - // Find the container to get its console socket - containers, err := d.container.List(ctx) - if err != nil { - return err - } - - for _, c := range containers { - if c.Name == "core-dev" { - // Use socat to connect to the console socket - socketPath := fmt.Sprintf("/tmp/core-%s-console.sock", c.ID) - cmd := exec.CommandContext(ctx, "socat", "-,raw,echo=0", "unix-connect:"+socketPath) - cmd.Stdin = os.Stdin - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - return cmd.Run() - } - } - - return fmt.Errorf("console not available") -} -``` - -**Step 2: Verify it compiles** - -Run: `cd /Users/snider/Code/Core && go build ./pkg/devops/...` -Expected: No errors - -**Step 3: Commit** - -```bash -git add pkg/devops/shell.go -git commit -m "feat(devops): add Shell for SSH and console access - -Connects to dev VM via SSH (default) or serial console (--console). -Supports SSH agent forwarding for credential access. 
- -Co-Authored-By: Claude Opus 4.5 " -``` - --- - -### Task 9: Implement Test Detection - -**Files:** -- Create: `pkg/devops/test.go` -- Create: `pkg/devops/test_test.go` - -**Step 1: Write the failing test** - -```go -package devops - -import ( - "os" - "path/filepath" - "testing" -) - -func TestDetectTestCommand_Good_ComposerJSON(t *testing.T) { - tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{"scripts":{"test":"pest"}}`), 0644) - - cmd := DetectTestCommand(tmpDir) - if cmd != "composer test" { - t.Errorf("expected 'composer test', got %q", cmd) - } -} - -func TestDetectTestCommand_Good_PackageJSON(t *testing.T) { - tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`{"scripts":{"test":"vitest"}}`), 0644) - - cmd := DetectTestCommand(tmpDir) - if cmd != "npm test" { - t.Errorf("expected 'npm test', got %q", cmd) - } -} - -func TestDetectTestCommand_Good_GoMod(t *testing.T) { - tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "go.mod"), []byte("module example"), 0644) - - cmd := DetectTestCommand(tmpDir) - if cmd != "go test ./..." { - t.Errorf("expected 'go test ./...', got %q", cmd) - } -} - -func TestDetectTestCommand_Good_CoreTestYaml(t *testing.T) { - tmpDir := t.TempDir() - coreDir := filepath.Join(tmpDir, ".core") - os.MkdirAll(coreDir, 0755) - os.WriteFile(filepath.Join(coreDir, "test.yaml"), []byte("command: custom-test"), 0644) - - cmd := DetectTestCommand(tmpDir) - if cmd != "custom-test" { - t.Errorf("expected 'custom-test', got %q", cmd) - } -} -``` - -**Step 2: Run test to verify it fails** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/devops/... -run TestDetectTestCommand -v` -Expected: FAIL - -**Step 3: Write implementation** - -```go -package devops - -import ( - "context" - "fmt" - "os" - "path/filepath" - - "gopkg.in/yaml.v3" -) - -// TestConfig holds test configuration from .core/test.yaml. 
-type TestConfig struct { - Version int `yaml:"version"` - Command string `yaml:"command,omitempty"` - Commands []TestCommand `yaml:"commands,omitempty"` - Env map[string]string `yaml:"env,omitempty"` -} - -// TestCommand is a named test command. -type TestCommand struct { - Name string `yaml:"name"` - Run string `yaml:"run"` -} - -// TestOptions configures test execution. -type TestOptions struct { - Name string // Run specific named command from .core/test.yaml - Command []string // Override command (from -- args) -} - -// Test runs tests in the dev environment. -func (d *DevOps) Test(ctx context.Context, projectDir string, opts TestOptions) error { - running, err := d.IsRunning(ctx) - if err != nil { - return err - } - if !running { - return fmt.Errorf("dev environment not running (run 'core dev boot' first)") - } - - var cmd string - - // Priority: explicit command > named command > auto-detect - if len(opts.Command) > 0 { - cmd = joinCommand(opts.Command) - } else if opts.Name != "" { - cfg, err := LoadTestConfig(projectDir) - if err != nil { - return err - } - for _, c := range cfg.Commands { - if c.Name == opts.Name { - cmd = c.Run - break - } - } - if cmd == "" { - return fmt.Errorf("test command %q not found in .core/test.yaml", opts.Name) - } - } else { - cmd = DetectTestCommand(projectDir) - if cmd == "" { - return fmt.Errorf("could not detect test command (create .core/test.yaml)") - } - } - - // Run via SSH - return d.sshShell(ctx, []string{"cd", "/app", "&&", cmd}) -} - -// DetectTestCommand auto-detects the test command for a project. -func DetectTestCommand(projectDir string) string { - // 1. Check .core/test.yaml - cfg, err := LoadTestConfig(projectDir) - if err == nil && cfg.Command != "" { - return cfg.Command - } - - // 2. Check composer.json - if hasFile(projectDir, "composer.json") { - return "composer test" - } - - // 3. Check package.json - if hasFile(projectDir, "package.json") { - return "npm test" - } - - // 4. 
Check go.mod - if hasFile(projectDir, "go.mod") { - return "go test ./..." - } - - // 5. Check pytest - if hasFile(projectDir, "pytest.ini") || hasFile(projectDir, "pyproject.toml") { - return "pytest" - } - - // 6. Check Taskfile - if hasFile(projectDir, "Taskfile.yaml") || hasFile(projectDir, "Taskfile.yml") { - return "task test" - } - - return "" -} - -// LoadTestConfig loads .core/test.yaml. -func LoadTestConfig(projectDir string) (*TestConfig, error) { - path := filepath.Join(projectDir, ".core", "test.yaml") - data, err := os.ReadFile(path) - if err != nil { - return nil, err - } - - var cfg TestConfig - if err := yaml.Unmarshal(data, &cfg); err != nil { - return nil, err - } - - return &cfg, nil -} - -func hasFile(dir, name string) bool { - _, err := os.Stat(filepath.Join(dir, name)) - return err == nil -} - -func joinCommand(parts []string) string { - result := "" - for i, p := range parts { - if i > 0 { - result += " " - } - result += p - } - return result -} -``` - -**Step 4: Run tests** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/devops/... -run TestDetectTestCommand -v` -Expected: PASS - -**Step 5: Commit** - -```bash -git add pkg/devops/test.go pkg/devops/test_test.go -git commit -m "feat(devops): add test detection and execution - -Auto-detects test framework from project files. -Supports .core/test.yaml for custom configuration. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 10: Implement Serve with Mount - -**Files:** -- Create: `pkg/devops/serve.go` - -**Step 1: Create serve.go** - -```go -package devops - -import ( - "context" - "encoding/json" - "fmt" - "os" - "os/exec" - "path/filepath" -) - -// ServeOptions configures the dev server. -type ServeOptions struct { - Port int // Port to serve on (default 8000) - Path string // Subdirectory to serve (default: current dir) -} - -// Serve mounts the project and starts a dev server. 
-func (d *DevOps) Serve(ctx context.Context, projectDir string, opts ServeOptions) error { - running, err := d.IsRunning(ctx) - if err != nil { - return err - } - if !running { - return fmt.Errorf("dev environment not running (run 'core dev boot' first)") - } - - if opts.Port == 0 { - opts.Port = 8000 - } - - servePath := projectDir - if opts.Path != "" { - servePath = filepath.Join(projectDir, opts.Path) - } - - // Mount project directory via SSHFS - if err := d.mountProject(ctx, servePath); err != nil { - return fmt.Errorf("failed to mount project: %w", err) - } - - // Detect and run serve command - serveCmd := DetectServeCommand(servePath) - fmt.Printf("Starting server: %s\n", serveCmd) - fmt.Printf("Listening on http://localhost:%d\n", opts.Port) - - // Run serve command via SSH - return d.sshShell(ctx, []string{"cd", "/app", "&&", serveCmd}) -} - -// mountProject mounts a directory into the VM via SSHFS. -func (d *DevOps) mountProject(ctx context.Context, path string) error { - absPath, err := filepath.Abs(path) - if err != nil { - return err - } - - // Use reverse SSHFS mount - // The VM connects back to host to mount the directory - cmd := exec.CommandContext(ctx, "ssh", - "-o", "StrictHostKeyChecking=no", - "-o", "UserKnownHostsFile=/dev/null", - "-R", "10000:localhost:22", // Reverse tunnel for SSHFS - "-p", "2222", - "root@localhost", - "mkdir -p /app && sshfs -p 10000 "+os.Getenv("USER")+"@localhost:"+absPath+" /app -o allow_other", - ) - return cmd.Run() -} - -// DetectServeCommand auto-detects the serve command for a project. 
-func DetectServeCommand(projectDir string) string { - // Laravel/Octane - if hasFile(projectDir, "artisan") { - return "php artisan octane:start --host=0.0.0.0 --port=8000" - } - - // Node.js with dev script - if hasFile(projectDir, "package.json") { - if hasPackageScript(projectDir, "dev") { - return "npm run dev -- --host 0.0.0.0" - } - if hasPackageScript(projectDir, "start") { - return "npm start" - } - } - - // PHP with composer - if hasFile(projectDir, "composer.json") { - return "frankenphp php-server -l :8000" - } - - // Go - if hasFile(projectDir, "go.mod") { - if hasFile(projectDir, "main.go") { - return "go run ." - } - } - - // Python - if hasFile(projectDir, "manage.py") { - return "python manage.py runserver 0.0.0.0:8000" - } - - // Fallback: simple HTTP server - return "python3 -m http.server 8000" -} - -func hasPackageScript(projectDir, script string) bool { - data, err := os.ReadFile(filepath.Join(projectDir, "package.json")) - if err != nil { - return false - } - - var pkg struct { - Scripts map[string]string `json:"scripts"` - } - if err := json.Unmarshal(data, &pkg); err != nil { - return false - } - - _, ok := pkg.Scripts[script] - return ok -} -``` - -**Step 2: Verify it compiles** - -Run: `cd /Users/snider/Code/Core && go build ./pkg/devops/...` -Expected: No errors - -**Step 3: Commit** - -```bash -git add pkg/devops/serve.go -git commit -m "feat(devops): add Serve with project mounting - -Mounts project via SSHFS and runs auto-detected dev server. -Supports Laravel, Node.js, PHP, Go, Python projects. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 11: Implement Claude Sandbox - -**Files:** -- Create: `pkg/devops/claude.go` - -**Step 1: Create claude.go** - -```go -package devops - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" -) - -// ClaudeOptions configures the Claude sandbox session. 
-type ClaudeOptions struct { - NoAuth bool // Don't forward any auth - Auth []string // Selective auth: "gh", "anthropic", "ssh", "git" - Model string // Model to use: opus, sonnet -} - -// Claude starts a sandboxed Claude session in the dev environment. -func (d *DevOps) Claude(ctx context.Context, projectDir string, opts ClaudeOptions) error { - // Auto-boot if not running - running, err := d.IsRunning(ctx) - if err != nil { - return err - } - if !running { - fmt.Println("Dev environment not running, booting...") - if err := d.Boot(ctx, DefaultBootOptions()); err != nil { - return fmt.Errorf("failed to boot: %w", err) - } - } - - // Mount project - if err := d.mountProject(ctx, projectDir); err != nil { - return fmt.Errorf("failed to mount project: %w", err) - } - - // Prepare environment variables to forward - envVars := []string{} - - if !opts.NoAuth { - authTypes := opts.Auth - if len(authTypes) == 0 { - authTypes = []string{"gh", "anthropic", "ssh", "git"} - } - - for _, auth := range authTypes { - switch auth { - case "anthropic": - if key := os.Getenv("ANTHROPIC_API_KEY"); key != "" { - envVars = append(envVars, "ANTHROPIC_API_KEY="+key) - } - case "git": - // Forward git config - name, _ := exec.Command("git", "config", "user.name").Output() - email, _ := exec.Command("git", "config", "user.email").Output() - if len(name) > 0 { - envVars = append(envVars, "GIT_AUTHOR_NAME="+strings.TrimSpace(string(name))) - envVars = append(envVars, "GIT_COMMITTER_NAME="+strings.TrimSpace(string(name))) - } - if len(email) > 0 { - envVars = append(envVars, "GIT_AUTHOR_EMAIL="+strings.TrimSpace(string(email))) - envVars = append(envVars, "GIT_COMMITTER_EMAIL="+strings.TrimSpace(string(email))) - } - } - } - } - - // Build SSH command with agent forwarding - args := []string{ - "-o", "StrictHostKeyChecking=no", - "-o", "UserKnownHostsFile=/dev/null", - "-o", "LogLevel=ERROR", - "-A", // SSH agent forwarding - "-p", "2222", - } - - // Add environment variables - for _, env 
:= range envVars { - args = append(args, "-o", "SendEnv="+strings.Split(env, "=")[0]) - } - - args = append(args, "root@localhost") - - // Build command to run inside - claudeCmd := "cd /app && claude" - if opts.Model != "" { - claudeCmd += " --model " + opts.Model - } - args = append(args, claudeCmd) - - // Set environment for SSH - cmd := exec.CommandContext(ctx, "ssh", args...) - cmd.Stdin = os.Stdin - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - cmd.Env = append(os.Environ(), envVars...) - - fmt.Println("Starting Claude in sandboxed environment...") - fmt.Println("Project mounted at /app") - fmt.Println("Auth forwarded: SSH agent" + formatAuthList(opts)) - fmt.Println() - - return cmd.Run() -} - -func formatAuthList(opts ClaudeOptions) string { - if opts.NoAuth { - return " (none)" - } - if len(opts.Auth) == 0 { - return ", gh, anthropic, git" - } - return ", " + strings.Join(opts.Auth, ", ") -} - -// CopyGHAuth copies GitHub CLI auth to the VM. -func (d *DevOps) CopyGHAuth(ctx context.Context) error { - home, err := os.UserHomeDir() - if err != nil { - return err - } - - ghConfigDir := filepath.Join(home, ".config", "gh") - if _, err := os.Stat(ghConfigDir); os.IsNotExist(err) { - return nil // No gh config to copy - } - - // Use scp to copy gh config - cmd := exec.CommandContext(ctx, "scp", - "-o", "StrictHostKeyChecking=no", - "-o", "UserKnownHostsFile=/dev/null", - "-P", "2222", - "-r", ghConfigDir, - "root@localhost:/root/.config/", - ) - return cmd.Run() -} -``` - -**Step 2: Verify it compiles** - -Run: `cd /Users/snider/Code/Core && go build ./pkg/devops/...` -Expected: No errors - -**Step 3: Commit** - -```bash -git add pkg/devops/claude.go -git commit -m "feat(devops): add Claude sandbox session - -Starts Claude in immutable dev environment with auth forwarding. -Auto-boots VM, mounts project, forwards credentials. 
- -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 12: Add CLI Commands - -**Files:** -- Create: `cmd/core/cmd/dev.go` -- Modify: `cmd/core/cmd/root.go` - -**Step 1: Create dev.go** - -```go -package cmd - -import ( - "context" - "fmt" - "os" - "strings" - - "github.com/charmbracelet/lipgloss" - "github.com/host-uk/core/pkg/devops" - "github.com/leaanthony/clir" -) - -var ( - devHeaderStyle = lipgloss.NewStyle(). - Bold(true). - Foreground(lipgloss.Color("#3b82f6")) - - devSuccessStyle = lipgloss.NewStyle(). - Bold(true). - Foreground(lipgloss.Color("#22c55e")) - - devErrorStyle = lipgloss.NewStyle(). - Bold(true). - Foreground(lipgloss.Color("#ef4444")) - - devDimStyle = lipgloss.NewStyle(). - Foreground(lipgloss.Color("#6b7280")) -) - -// AddDevCommand adds the dev command group. -func AddDevCommand(app *clir.Cli) { - devCmd := app.NewSubCommand("dev", "Portable development environment") - devCmd.LongDescription("Manage the core-devops portable development environment.\n" + - "A sandboxed, immutable Linux VM with 100+ development tools.") - - addDevInstallCommand(devCmd) - addDevBootCommand(devCmd) - addDevStopCommand(devCmd) - addDevStatusCommand(devCmd) - addDevShellCommand(devCmd) - addDevServeCommand(devCmd) - addDevTestCommand(devCmd) - addDevClaudeCommand(devCmd) - addDevUpdateCommand(devCmd) -} - -func addDevInstallCommand(parent *clir.Cli) { - var source string - cmd := parent.NewSubCommand("install", "Download the dev environment image") - cmd.StringFlag("source", "Image source: auto, github, registry, cdn", &source) - - cmd.Action(func() error { - ctx := context.Background() - d, err := devops.New() - if err != nil { - return err - } - - if d.IsInstalled() { - fmt.Printf("%s Dev image already installed\n", devSuccessStyle.Render("OK:")) - fmt.Println("Use 'core dev update' to check for updates") - return nil - } - - fmt.Printf("%s Downloading dev image...\n", devHeaderStyle.Render("Install:")) - - progress := func(downloaded, total int64) { - 
if total > 0 { - pct := float64(downloaded) / float64(total) * 100 - fmt.Printf("\r %.1f%% (%d / %d MB)", pct, downloaded/1024/1024, total/1024/1024) - } - } - - if err := d.Install(ctx, progress); err != nil { - return err - } - - fmt.Println() - fmt.Printf("%s Dev image installed\n", devSuccessStyle.Render("Success:")) - return nil - }) -} - -func addDevBootCommand(parent *clir.Cli) { - var memory, cpus int - var fresh bool - - cmd := parent.NewSubCommand("boot", "Start the dev environment") - cmd.IntFlag("memory", "Memory in MB (default: 4096)", &memory) - cmd.IntFlag("cpus", "Number of CPUs (default: 2)", &cpus) - cmd.BoolFlag("fresh", "Destroy existing and start fresh", &fresh) - - cmd.Action(func() error { - ctx := context.Background() - d, err := devops.New() - if err != nil { - return err - } - - opts := devops.DefaultBootOptions() - if memory > 0 { - opts.Memory = memory - } - if cpus > 0 { - opts.CPUs = cpus - } - opts.Fresh = fresh - - fmt.Printf("%s Starting dev environment...\n", devHeaderStyle.Render("Boot:")) - - if err := d.Boot(ctx, opts); err != nil { - return err - } - - fmt.Printf("%s Dev environment running\n", devSuccessStyle.Render("Success:")) - fmt.Printf(" Memory: %d MB\n", opts.Memory) - fmt.Printf(" CPUs: %d\n", opts.CPUs) - fmt.Printf(" SSH: ssh -p 2222 root@localhost\n") - return nil - }) -} - -func addDevStopCommand(parent *clir.Cli) { - cmd := parent.NewSubCommand("stop", "Stop the dev environment") - cmd.Action(func() error { - ctx := context.Background() - d, err := devops.New() - if err != nil { - return err - } - - fmt.Printf("%s Stopping dev environment...\n", devHeaderStyle.Render("Stop:")) - - if err := d.Stop(ctx); err != nil { - return err - } - - fmt.Printf("%s Dev environment stopped\n", devSuccessStyle.Render("Success:")) - return nil - }) -} - -func addDevStatusCommand(parent *clir.Cli) { - cmd := parent.NewSubCommand("status", "Show dev environment status") - cmd.Action(func() error { - ctx := context.Background() - d, 
err := devops.New() - if err != nil { - return err - } - - status, err := d.Status(ctx) - if err != nil { - return err - } - - fmt.Printf("%s Dev Environment\n\n", devHeaderStyle.Render("Status:")) - - if status.Installed { - fmt.Printf(" Image: %s\n", devSuccessStyle.Render("installed")) - fmt.Printf(" Version: %s\n", status.ImageVersion) - } else { - fmt.Printf(" Image: %s\n", devDimStyle.Render("not installed")) - } - - if status.Running { - fmt.Printf(" Status: %s\n", devSuccessStyle.Render("running")) - fmt.Printf(" ID: %s\n", status.ContainerID[:8]) - fmt.Printf(" Memory: %d MB\n", status.Memory) - fmt.Printf(" CPUs: %d\n", status.CPUs) - fmt.Printf(" SSH: port %d\n", status.SSHPort) - fmt.Printf(" Uptime: %s\n", status.Uptime.Round(1000000000)) - } else { - fmt.Printf(" Status: %s\n", devDimStyle.Render("stopped")) - } - - return nil - }) -} - -func addDevShellCommand(parent *clir.Cli) { - var console bool - cmd := parent.NewSubCommand("shell", "Open a shell in the dev environment") - cmd.BoolFlag("console", "Use serial console instead of SSH", &console) - - cmd.Action(func() error { - ctx := context.Background() - d, err := devops.New() - if err != nil { - return err - } - - return d.Shell(ctx, devops.ShellOptions{Console: console}) - }) -} - -func addDevServeCommand(parent *clir.Cli) { - var port int - var path string - - cmd := parent.NewSubCommand("serve", "Mount project and start dev server") - cmd.IntFlag("port", "Port to serve on (default: 8000)", &port) - cmd.StringFlag("path", "Subdirectory to serve", &path) - - cmd.Action(func() error { - ctx := context.Background() - d, err := devops.New() - if err != nil { - return err - } - - projectDir, _ := os.Getwd() - return d.Serve(ctx, projectDir, devops.ServeOptions{Port: port, Path: path}) - }) -} - -func addDevTestCommand(parent *clir.Cli) { - var name string - - cmd := parent.NewSubCommand("test", "Run tests in dev environment") - cmd.StringFlag("name", "Run specific named test from .core/test.yaml", 
&name) - - cmd.Action(func() error { - ctx := context.Background() - d, err := devops.New() - if err != nil { - return err - } - - projectDir, _ := os.Getwd() - args := cmd.OtherArgs() - - return d.Test(ctx, projectDir, devops.TestOptions{ - Name: name, - Command: args, - }) - }) -} - -func addDevClaudeCommand(parent *clir.Cli) { - var noAuth bool - var auth string - var model string - - cmd := parent.NewSubCommand("claude", "Start Claude in sandboxed dev environment") - cmd.BoolFlag("no-auth", "Don't forward any credentials", &noAuth) - cmd.StringFlag("auth", "Selective auth forwarding: gh,anthropic,ssh,git", &auth) - cmd.StringFlag("model", "Model to use: opus, sonnet", &model) - - cmd.Action(func() error { - ctx := context.Background() - d, err := devops.New() - if err != nil { - return err - } - - projectDir, _ := os.Getwd() - - var authList []string - if auth != "" { - authList = strings.Split(auth, ",") - } - - return d.Claude(ctx, projectDir, devops.ClaudeOptions{ - NoAuth: noAuth, - Auth: authList, - Model: model, - }) - }) -} - -func addDevUpdateCommand(parent *clir.Cli) { - var force bool - cmd := parent.NewSubCommand("update", "Check for and download image updates") - cmd.BoolFlag("force", "Force download even if up to date", &force) - - cmd.Action(func() error { - ctx := context.Background() - d, err := devops.New() - if err != nil { - return err - } - - if !d.IsInstalled() { - return fmt.Errorf("dev image not installed (run 'core dev install' first)") - } - - fmt.Printf("%s Checking for updates...\n", devHeaderStyle.Render("Update:")) - - current, latest, hasUpdate, err := d.CheckUpdate(ctx) - if err != nil { - return err - } - - if !hasUpdate && !force { - fmt.Printf("%s Already up to date (%s)\n", devSuccessStyle.Render("OK:"), current) - return nil - } - - fmt.Printf(" Current: %s\n", current) - fmt.Printf(" Latest: %s\n", latest) - - progress := func(downloaded, total int64) { - if total > 0 { - pct := float64(downloaded) / float64(total) * 100 - 
fmt.Printf("\r Downloading: %.1f%%", pct) - } - } - - if err := d.Install(ctx, progress); err != nil { - return err - } - - fmt.Println() - fmt.Printf("%s Updated to %s\n", devSuccessStyle.Render("Success:"), latest) - return nil - }) -} -``` - -**Step 2: Add to root.go** - -Add after other command registrations: -```go -AddDevCommand(app) -``` - -**Step 3: Verify it compiles** - -Run: `cd /Users/snider/Code/Core && go build ./cmd/core/...` -Expected: No errors - -**Step 4: Commit** - -```bash -git add cmd/core/cmd/dev.go cmd/core/cmd/root.go -git commit -m "feat(cli): add dev command group - -Commands: -- core dev install/boot/stop/status -- core dev shell/serve/test -- core dev claude (sandboxed AI session) -- core dev update - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 13: Final Integration Test - -**Step 1: Build CLI** - -Run: `cd /Users/snider/Code/Core && go build -o bin/core ./cmd/core` -Expected: No errors - -**Step 2: Test help output** - -Run: `./bin/core dev --help` -Expected: Shows all dev subcommands - -**Step 3: Run package tests** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/devops/... -v` -Expected: All tests pass - -**Step 4: Update TODO.md** - -Mark S4.6 tasks as complete in tasks/TODO.md - -**Step 5: Final commit** - -```bash -git add -A -git commit -m "chore(devops): finalize S4.6 core-devops CLI - -All dev commands implemented: -- install/boot/stop/status -- shell/serve/test -- claude (sandboxed AI session) -- update - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -## Summary - -13 tasks covering: -1. Package structure -2. Config loading -3. ImageSource interface -4. GitHub source -5. CDN source -6. ImageManager -7. Boot/Stop/Status -8. Shell command -9. Test detection -10. Serve with mount -11. Claude sandbox -12. CLI commands -13. 
Integration test diff --git a/tasks/plans/2026-01-29-sdk-generation-design.md b/tasks/plans/2026-01-29-sdk-generation-design.md deleted file mode 100644 index ee189fc..0000000 --- a/tasks/plans/2026-01-29-sdk-generation-design.md +++ /dev/null @@ -1,291 +0,0 @@ -# SDK Generation Design - -## Summary - -Generate typed API clients from OpenAPI specs for TypeScript, Python, Go, and PHP. Includes breaking change detection via semantic diff. - -## Design Decisions - -- **Generator approach**: Hybrid - native generators where available, openapi-generator fallback -- **Languages**: TypeScript, Python, Go, PHP (Core 4) -- **Detection**: Config → common paths → Laravel Scramble -- **Output**: Local `sdk/` + optional monorepo publish -- **Diff**: Semantic with oasdiff, CI-friendly exit codes -- **Priority**: DX (developer experience) - -## Package Structure - -``` -pkg/sdk/ -├── sdk.go # Main SDK type, orchestration -├── detect.go # OpenAPI spec detection -├── diff.go # Breaking change detection (oasdiff) -├── generators/ -│ ├── generator.go # Generator interface -│ ├── typescript.go # openapi-typescript-codegen -│ ├── python.go # openapi-python-client -│ ├── go.go # oapi-codegen -│ └── php.go # openapi-generator (Docker) -└── templates/ # Package scaffolding templates - ├── typescript/ - │ └── package.json.tmpl - ├── python/ - │ └── setup.py.tmpl - ├── go/ - │ └── go.mod.tmpl - └── php/ - └── composer.json.tmpl -``` - -## OpenAPI Detection Flow - -``` -1. Check config: sdk.spec in .core/release.yaml - ↓ not found -2. Check common paths: - - api/openapi.yaml - - api/openapi.json - - openapi.yaml - - openapi.json - - docs/api.yaml - - swagger.yaml - ↓ not found -3. Laravel Scramble detection: - - Check for scramble/scramble in composer.json - - Run: php artisan scramble:export --path=api/openapi.json - - Use generated spec - ↓ not found -4. 
Error: No OpenAPI spec found -``` - -## Generator Interface - -```go -type Generator interface { - // Language returns the generator's target language - Language() string - - // Generate creates SDK from OpenAPI spec - Generate(ctx context.Context, opts GenerateOptions) error - - // Available checks if generator dependencies are installed - Available() bool - - // Install provides installation instructions - Install() string -} - -type GenerateOptions struct { - SpecPath string // OpenAPI spec file - OutputDir string // Where to write SDK - PackageName string // Package/module name - Version string // SDK version -} -``` - -### Native Generators - -| Language | Tool | Install | -|------------|----------------------------|--------------------------------| -| TypeScript | openapi-typescript-codegen | `npm i -g openapi-typescript-codegen` | -| Python | openapi-python-client | `pip install openapi-python-client` | -| Go | oapi-codegen | `go install github.com/deepmap/oapi-codegen/cmd/oapi-codegen@latest` | -| PHP | openapi-generator (Docker) | Requires Docker | - -### Fallback Strategy - -```go -func (g *TypeScriptGenerator) Generate(ctx context.Context, opts GenerateOptions) error { - if g.Available() { - return g.generateNative(ctx, opts) - } - return g.generateDocker(ctx, opts) // openapi-generator in Docker -} -``` - -## Breaking Change Detection - -Using [oasdiff](https://github.com/Tufin/oasdiff) for semantic OpenAPI comparison: - -```go -import "github.com/tufin/oasdiff/diff" -import "github.com/tufin/oasdiff/checker" - -func (s *SDK) Diff(base, revision string) (*DiffResult, error) { - // Load specs - baseSpec, _ := load.From(loader, base) - revSpec, _ := load.From(loader, revision) - - // Compute diff - d, _ := diff.Get(diff.NewConfig(), baseSpec, revSpec) - - // Check for breaking changes - breaks := checker.CheckBackwardCompatibility( - checker.GetDefaultChecks(), - d, - baseSpec, - revSpec, - ) - - return &DiffResult{ - Breaking: len(breaks) > 0, - Changes: 
breaks, - Summary: formatSummary(d), - }, nil -} -``` - -### Exit Codes for CI - -| Exit Code | Meaning | -|-----------|---------| -| 0 | No breaking changes | -| 1 | Breaking changes detected | -| 2 | Error (invalid spec, etc.) | - -### Breaking Change Categories - -- Removed endpoints -- Changed required parameters -- Modified response schemas -- Changed authentication requirements - -## CLI Commands - -```bash -# Generate SDKs from OpenAPI spec -core sdk generate # Uses .core/release.yaml config -core sdk generate --spec api.yaml # Explicit spec file -core sdk generate --lang typescript # Single language - -# Check for breaking changes -core sdk diff # Compare current vs last release -core sdk diff --spec api.yaml --base v1.0.0 - -# Validate spec before generation -core sdk validate -core sdk validate --spec api.yaml -``` - -## Config Schema - -In `.core/release.yaml`: - -```yaml -sdk: - # OpenAPI spec source (auto-detected if omitted) - spec: api/openapi.yaml - - # Languages to generate - languages: - - typescript - - python - - go - - php - - # Output directory (default: sdk/) - output: sdk/ - - # Package naming - package: - name: myapi # Base name - version: "{{.Version}}" - - # Breaking change detection - diff: - enabled: true - fail_on_breaking: true # CI fails on breaking changes - - # Optional: publish to monorepo - publish: - repo: myorg/sdks - path: packages/myapi -``` - -## Output Structure - -Each generator outputs to `sdk/{lang}/`: - -``` -sdk/ -├── typescript/ -│ ├── package.json -│ ├── src/ -│ │ ├── index.ts -│ │ ├── client.ts -│ │ └── models/ -│ └── tsconfig.json -├── python/ -│ ├── setup.py -│ ├── myapi/ -│ │ ├── __init__.py -│ │ ├── client.py -│ │ └── models/ -│ └── requirements.txt -├── go/ -│ ├── go.mod -│ ├── client.go -│ └── models.go -└── php/ - ├── composer.json - ├── src/ - │ ├── Client.php - │ └── Models/ - └── README.md -``` - -## Publishing Workflow - -SDK publishing integrates with the existing release pipeline: - -``` -core release - 
→ build artifacts - → generate SDKs (if sdk: configured) - → run diff check (warns or fails on breaking) - → publish to GitHub release - → publish SDKs (optional) -``` - -### Monorepo Publishing - -For projects using a shared SDK monorepo: - -1. Clone target repo (shallow) -2. Update `packages/{name}/{lang}/` -3. Commit with version tag -4. Push (triggers downstream CI) - -The SDK tarball is also attached to GitHub releases for direct download. - -## Implementation Steps - -1. Create `pkg/sdk/` package structure -2. Implement OpenAPI detection (`detect.go`) -3. Define Generator interface (`generators/generator.go`) -4. Implement TypeScript generator (native + fallback) -5. Implement Python generator (native + fallback) -6. Implement Go generator (native) -7. Implement PHP generator (Docker-based) -8. Add package templates (`templates/`) -9. Implement diff with oasdiff (`diff.go`) -10. Add CLI commands (`cmd/core/sdk.go`) -11. Integrate with release pipeline -12. Add monorepo publish support - -## Dependencies - -```go -// go.mod additions -require ( - github.com/tufin/oasdiff v1.x.x - github.com/getkin/kin-openapi v0.x.x -) -``` - -## Testing - -- Unit tests for each generator -- Integration tests with sample OpenAPI specs -- Diff tests with known breaking/non-breaking changes -- E2E test generating SDKs for a real API diff --git a/tasks/plans/2026-01-29-sdk-generation-impl.md b/tasks/plans/2026-01-29-sdk-generation-impl.md deleted file mode 100644 index 8e263f0..0000000 --- a/tasks/plans/2026-01-29-sdk-generation-impl.md +++ /dev/null @@ -1,1861 +0,0 @@ -# SDK Generation Implementation Plan - -> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. - -**Goal:** Generate typed API clients from OpenAPI specs for TypeScript, Python, Go, and PHP with breaking change detection. 
- -**Architecture:** Hybrid generator approach - native tools where available (openapi-typescript-codegen, openapi-python-client, oapi-codegen), Docker fallback for others (openapi-generator). Detection flow: config → common paths → Laravel Scramble. Breaking changes via oasdiff library. - -**Tech Stack:** Go, oasdiff, kin-openapi, embedded templates, exec for native generators, Docker for fallback - ---- - -### Task 1: Create SDK Package Structure - -**Files:** -- Create: `pkg/sdk/sdk.go` -- Create: `pkg/sdk/go.mod` - -**Step 1: Create go.mod for sdk package** - -```go -module github.com/host-uk/core/pkg/sdk - -go 1.25 - -require ( - github.com/getkin/kin-openapi v0.128.0 - github.com/tufin/oasdiff v1.10.25 - gopkg.in/yaml.v3 v3.0.1 -) -``` - -**Step 2: Create sdk.go with types and config** - -```go -// Package sdk provides OpenAPI SDK generation and diff capabilities. -package sdk - -import ( - "context" - "fmt" -) - -// Config holds SDK generation configuration from .core/release.yaml. -type Config struct { - // Spec is the path to the OpenAPI spec file (auto-detected if empty). - Spec string `yaml:"spec,omitempty"` - // Languages to generate SDKs for. - Languages []string `yaml:"languages,omitempty"` - // Output directory (default: sdk/). - Output string `yaml:"output,omitempty"` - // Package naming configuration. - Package PackageConfig `yaml:"package,omitempty"` - // Diff configuration for breaking change detection. - Diff DiffConfig `yaml:"diff,omitempty"` - // Publish configuration for monorepo publishing. - Publish PublishConfig `yaml:"publish,omitempty"` -} - -// PackageConfig holds package naming configuration. -type PackageConfig struct { - // Name is the base package name. - Name string `yaml:"name,omitempty"` - // Version is the SDK version (supports templates like {{.Version}}). - Version string `yaml:"version,omitempty"` -} - -// DiffConfig holds breaking change detection configuration. 
-type DiffConfig struct { - // Enabled determines whether to run diff checks. - Enabled bool `yaml:"enabled,omitempty"` - // FailOnBreaking fails the release if breaking changes are detected. - FailOnBreaking bool `yaml:"fail_on_breaking,omitempty"` -} - -// PublishConfig holds monorepo publishing configuration. -type PublishConfig struct { - // Repo is the SDK monorepo (e.g., "myorg/sdks"). - Repo string `yaml:"repo,omitempty"` - // Path is the subdirectory for this SDK (e.g., "packages/myapi"). - Path string `yaml:"path,omitempty"` -} - -// SDK orchestrates OpenAPI SDK generation. -type SDK struct { - config *Config - projectDir string -} - -// New creates a new SDK instance. -func New(projectDir string, config *Config) *SDK { - if config == nil { - config = DefaultConfig() - } - return &SDK{ - config: config, - projectDir: projectDir, - } -} - -// DefaultConfig returns sensible defaults for SDK configuration. -func DefaultConfig() *Config { - return &Config{ - Languages: []string{"typescript", "python", "go", "php"}, - Output: "sdk", - Diff: DiffConfig{ - Enabled: true, - FailOnBreaking: false, - }, - } -} - -// Generate generates SDKs for all configured languages. -func (s *SDK) Generate(ctx context.Context) error { - return fmt.Errorf("sdk.Generate: not implemented") -} - -// GenerateLanguage generates SDK for a specific language. -func (s *SDK) GenerateLanguage(ctx context.Context, lang string) error { - return fmt.Errorf("sdk.GenerateLanguage: not implemented") -} -``` - -**Step 3: Add to go.work** - -Run: `cd /Users/snider/Code/Core && echo " ./pkg/sdk" >> go.work && go work sync` - -**Step 4: Verify it compiles** - -Run: `cd /Users/snider/Code/Core && go build ./pkg/sdk/...` -Expected: No errors - -**Step 5: Commit** - -```bash -git add pkg/sdk/ -git add go.work go.work.sum -git commit -m "feat(sdk): add SDK package structure with types - -Initial pkg/sdk setup with Config types for OpenAPI SDK generation. 
-Includes language selection, diff config, and publish config. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 2: Implement OpenAPI Spec Detection - -**Files:** -- Create: `pkg/sdk/detect.go` -- Create: `pkg/sdk/detect_test.go` - -**Step 1: Write the failing test** - -```go -package sdk - -import ( - "os" - "path/filepath" - "testing" -) - -func TestDetectSpec_Good_ConfigPath(t *testing.T) { - // Create temp directory with spec at configured path - tmpDir := t.TempDir() - specPath := filepath.Join(tmpDir, "api", "spec.yaml") - os.MkdirAll(filepath.Dir(specPath), 0755) - os.WriteFile(specPath, []byte("openapi: 3.0.0"), 0644) - - sdk := New(tmpDir, &Config{Spec: "api/spec.yaml"}) - got, err := sdk.DetectSpec() - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - if got != specPath { - t.Errorf("got %q, want %q", got, specPath) - } -} - -func TestDetectSpec_Good_CommonPath(t *testing.T) { - // Create temp directory with spec at common path - tmpDir := t.TempDir() - specPath := filepath.Join(tmpDir, "openapi.yaml") - os.WriteFile(specPath, []byte("openapi: 3.0.0"), 0644) - - sdk := New(tmpDir, nil) - got, err := sdk.DetectSpec() - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - if got != specPath { - t.Errorf("got %q, want %q", got, specPath) - } -} - -func TestDetectSpec_Bad_NotFound(t *testing.T) { - tmpDir := t.TempDir() - sdk := New(tmpDir, nil) - _, err := sdk.DetectSpec() - if err == nil { - t.Fatal("expected error for missing spec") - } -} -``` - -**Step 2: Run test to verify it fails** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/sdk/... -run TestDetectSpec -v` -Expected: FAIL (DetectSpec not defined) - -**Step 3: Write minimal implementation** - -```go -package sdk - -import ( - "fmt" - "os" - "path/filepath" -) - -// commonSpecPaths are checked in order when no spec is configured. 
-var commonSpecPaths = []string{ - "api/openapi.yaml", - "api/openapi.json", - "openapi.yaml", - "openapi.json", - "docs/api.yaml", - "docs/api.json", - "swagger.yaml", - "swagger.json", -} - -// DetectSpec finds the OpenAPI spec file. -// Priority: config path → common paths → Laravel Scramble. -func (s *SDK) DetectSpec() (string, error) { - // 1. Check configured path - if s.config.Spec != "" { - specPath := filepath.Join(s.projectDir, s.config.Spec) - if _, err := os.Stat(specPath); err == nil { - return specPath, nil - } - return "", fmt.Errorf("sdk.DetectSpec: configured spec not found: %s", s.config.Spec) - } - - // 2. Check common paths - for _, p := range commonSpecPaths { - specPath := filepath.Join(s.projectDir, p) - if _, err := os.Stat(specPath); err == nil { - return specPath, nil - } - } - - // 3. Try Laravel Scramble detection - specPath, err := s.detectScramble() - if err == nil { - return specPath, nil - } - - return "", fmt.Errorf("sdk.DetectSpec: no OpenAPI spec found (checked config, common paths, Scramble)") -} - -// detectScramble checks for Laravel Scramble and exports the spec. -func (s *SDK) detectScramble() (string, error) { - composerPath := filepath.Join(s.projectDir, "composer.json") - if _, err := os.Stat(composerPath); err != nil { - return "", fmt.Errorf("no composer.json") - } - - // Check for scramble in composer.json - data, err := os.ReadFile(composerPath) - if err != nil { - return "", err - } - - // Simple check for scramble package - if !containsScramble(data) { - return "", fmt.Errorf("scramble not found in composer.json") - } - - // TODO: Run php artisan scramble:export - return "", fmt.Errorf("scramble export not implemented") -} - -// containsScramble checks if composer.json includes scramble. -func containsScramble(data []byte) bool { - return len(data) > 0 && - (contains(data, "dedoc/scramble") || contains(data, "\"scramble\"")) -} - -// contains is a simple byte slice search. 
-func contains(data []byte, substr string) bool { - return len(data) >= len(substr) && - string(data) != "" && - indexOf(string(data), substr) >= 0 -} - -func indexOf(s, substr string) int { - for i := 0; i <= len(s)-len(substr); i++ { - if s[i:i+len(substr)] == substr { - return i - } - } - return -1 -} -``` - -**Step 4: Run tests to verify they pass** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/sdk/... -run TestDetectSpec -v` -Expected: PASS - -**Step 5: Commit** - -```bash -git add pkg/sdk/detect.go pkg/sdk/detect_test.go -git commit -m "feat(sdk): add OpenAPI spec detection - -Detects OpenAPI spec via: -1. Configured spec path -2. Common paths (api/openapi.yaml, openapi.yaml, etc.) -3. Laravel Scramble (stub for now) - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 3: Define Generator Interface - -**Files:** -- Create: `pkg/sdk/generators/generator.go` - -**Step 1: Create generator interface** - -```go -// Package generators provides SDK code generators for different languages. -package generators - -import ( - "context" -) - -// Options holds common generation options. -type Options struct { - // SpecPath is the path to the OpenAPI spec file. - SpecPath string - // OutputDir is where to write the generated SDK. - OutputDir string - // PackageName is the package/module name. - PackageName string - // Version is the SDK version. - Version string -} - -// Generator defines the interface for SDK generators. -type Generator interface { - // Language returns the generator's target language identifier. - Language() string - - // Generate creates SDK from OpenAPI spec. - Generate(ctx context.Context, opts Options) error - - // Available checks if generator dependencies are installed. - Available() bool - - // Install returns instructions for installing the generator. - Install() string -} - -// Registry holds available generators. 
-type Registry struct { - generators map[string]Generator -} - -// NewRegistry creates a registry with all available generators. -func NewRegistry() *Registry { - r := &Registry{ - generators: make(map[string]Generator), - } - // Generators will be registered in subsequent tasks - return r -} - -// Get returns a generator by language. -func (r *Registry) Get(lang string) (Generator, bool) { - g, ok := r.generators[lang] - return g, ok -} - -// Register adds a generator to the registry. -func (r *Registry) Register(g Generator) { - r.generators[g.Language()] = g -} - -// Languages returns all registered language identifiers. -func (r *Registry) Languages() []string { - langs := make([]string, 0, len(r.generators)) - for lang := range r.generators { - langs = append(langs, lang) - } - return langs -} -``` - -**Step 2: Verify it compiles** - -Run: `cd /Users/snider/Code/Core && go build ./pkg/sdk/...` -Expected: No errors - -**Step 3: Commit** - -```bash -git add pkg/sdk/generators/generator.go -git commit -m "feat(sdk): add Generator interface and Registry - -Defines the common interface for SDK generators with: -- Generate(), Available(), Install() methods -- Registry for managing multiple generators - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 4: Implement TypeScript Generator - -**Files:** -- Create: `pkg/sdk/generators/typescript.go` -- Create: `pkg/sdk/generators/typescript_test.go` - -**Step 1: Write the failing test** - -```go -package generators - -import ( - "context" - "os" - "os/exec" - "path/filepath" - "testing" -) - -func TestTypeScriptGenerator_Good_Available(t *testing.T) { - g := NewTypeScriptGenerator() - // Just check it doesn't panic - _ = g.Available() - _ = g.Language() - _ = g.Install() -} - -func TestTypeScriptGenerator_Good_Generate(t *testing.T) { - // Skip if no generator available - g := NewTypeScriptGenerator() - if !g.Available() && !dockerAvailable() { - t.Skip("no TypeScript generator available (need 
openapi-typescript-codegen or Docker)") - } - - // Create temp spec - tmpDir := t.TempDir() - specPath := filepath.Join(tmpDir, "spec.yaml") - spec := `openapi: "3.0.0" -info: - title: Test API - version: "1.0.0" -paths: - /health: - get: - operationId: getHealth - responses: - "200": - description: OK -` - os.WriteFile(specPath, []byte(spec), 0644) - - outputDir := filepath.Join(tmpDir, "sdk", "typescript") - err := g.Generate(context.Background(), Options{ - SpecPath: specPath, - OutputDir: outputDir, - PackageName: "test-api", - Version: "1.0.0", - }) - if err != nil { - t.Fatalf("Generate failed: %v", err) - } - - // Check output exists - if _, err := os.Stat(outputDir); os.IsNotExist(err) { - t.Error("output directory not created") - } -} - -func dockerAvailable() bool { - _, err := exec.LookPath("docker") - return err == nil -} -``` - -**Step 2: Run test to verify it fails** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/sdk/generators/... -run TestTypeScriptGenerator -v` -Expected: FAIL (NewTypeScriptGenerator not defined) - -**Step 3: Write implementation** - -```go -package generators - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" -) - -// TypeScriptGenerator generates TypeScript SDKs using openapi-typescript-codegen. -type TypeScriptGenerator struct{} - -// NewTypeScriptGenerator creates a new TypeScript generator. -func NewTypeScriptGenerator() *TypeScriptGenerator { - return &TypeScriptGenerator{} -} - -// Language returns "typescript". -func (g *TypeScriptGenerator) Language() string { - return "typescript" -} - -// Available checks if openapi-typescript-codegen is installed. -func (g *TypeScriptGenerator) Available() bool { - _, err := exec.LookPath("openapi-typescript-codegen") - if err == nil { - return true - } - // Also check npx availability - _, err = exec.LookPath("npx") - return err == nil -} - -// Install returns installation instructions. 
-func (g *TypeScriptGenerator) Install() string { - return "npm install -g openapi-typescript-codegen" -} - -// Generate creates TypeScript SDK from OpenAPI spec. -func (g *TypeScriptGenerator) Generate(ctx context.Context, opts Options) error { - // Ensure output directory exists - if err := os.MkdirAll(opts.OutputDir, 0755); err != nil { - return fmt.Errorf("typescript.Generate: failed to create output dir: %w", err) - } - - // Try native generator first - if g.nativeAvailable() { - return g.generateNative(ctx, opts) - } - - // Try npx - if g.npxAvailable() { - return g.generateNpx(ctx, opts) - } - - // Fall back to Docker - return g.generateDocker(ctx, opts) -} - -func (g *TypeScriptGenerator) nativeAvailable() bool { - _, err := exec.LookPath("openapi-typescript-codegen") - return err == nil -} - -func (g *TypeScriptGenerator) npxAvailable() bool { - _, err := exec.LookPath("npx") - return err == nil -} - -func (g *TypeScriptGenerator) generateNative(ctx context.Context, opts Options) error { - cmd := exec.CommandContext(ctx, "openapi-typescript-codegen", - "--input", opts.SpecPath, - "--output", opts.OutputDir, - "--name", opts.PackageName, - ) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - return cmd.Run() -} - -func (g *TypeScriptGenerator) generateNpx(ctx context.Context, opts Options) error { - cmd := exec.CommandContext(ctx, "npx", "openapi-typescript-codegen", - "--input", opts.SpecPath, - "--output", opts.OutputDir, - "--name", opts.PackageName, - ) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - return cmd.Run() -} - -func (g *TypeScriptGenerator) generateDocker(ctx context.Context, opts Options) error { - // Use openapi-generator via Docker - specDir := filepath.Dir(opts.SpecPath) - specName := filepath.Base(opts.SpecPath) - - cmd := exec.CommandContext(ctx, "docker", "run", "--rm", - "-v", specDir+":/spec", - "-v", opts.OutputDir+":/out", - "openapitools/openapi-generator-cli", "generate", - "-i", "/spec/"+specName, - "-g", 
"typescript-fetch", - "-o", "/out", - "--additional-properties=npmName="+opts.PackageName, - ) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - if err := cmd.Run(); err != nil { - return fmt.Errorf("typescript.generateDocker: %w", err) - } - return nil -} -``` - -**Step 4: Run tests** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/sdk/generators/... -run TestTypeScriptGenerator -v` -Expected: PASS (or skip if no generator available) - -**Step 5: Commit** - -```bash -git add pkg/sdk/generators/typescript.go pkg/sdk/generators/typescript_test.go -git commit -m "feat(sdk): add TypeScript generator - -Uses openapi-typescript-codegen (native or npx) with Docker fallback. -Generates TypeScript-fetch client from OpenAPI spec. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 5: Implement Python Generator - -**Files:** -- Create: `pkg/sdk/generators/python.go` -- Create: `pkg/sdk/generators/python_test.go` - -**Step 1: Write the failing test** - -```go -package generators - -import ( - "context" - "os" - "path/filepath" - "testing" -) - -func TestPythonGenerator_Good_Available(t *testing.T) { - g := NewPythonGenerator() - _ = g.Available() - _ = g.Language() - _ = g.Install() -} - -func TestPythonGenerator_Good_Generate(t *testing.T) { - g := NewPythonGenerator() - if !g.Available() && !dockerAvailable() { - t.Skip("no Python generator available") - } - - tmpDir := t.TempDir() - specPath := filepath.Join(tmpDir, "spec.yaml") - spec := `openapi: "3.0.0" -info: - title: Test API - version: "1.0.0" -paths: - /health: - get: - operationId: getHealth - responses: - "200": - description: OK -` - os.WriteFile(specPath, []byte(spec), 0644) - - outputDir := filepath.Join(tmpDir, "sdk", "python") - err := g.Generate(context.Background(), Options{ - SpecPath: specPath, - OutputDir: outputDir, - PackageName: "test_api", - Version: "1.0.0", - }) - if err != nil { - t.Fatalf("Generate failed: %v", err) - } - - if _, err := os.Stat(outputDir); os.IsNotExist(err) { - 
t.Error("output directory not created") - } -} -``` - -**Step 2: Run test to verify it fails** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/sdk/generators/... -run TestPythonGenerator -v` -Expected: FAIL - -**Step 3: Write implementation** - -```go -package generators - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" -) - -// PythonGenerator generates Python SDKs using openapi-python-client. -type PythonGenerator struct{} - -// NewPythonGenerator creates a new Python generator. -func NewPythonGenerator() *PythonGenerator { - return &PythonGenerator{} -} - -// Language returns "python". -func (g *PythonGenerator) Language() string { - return "python" -} - -// Available checks if openapi-python-client is installed. -func (g *PythonGenerator) Available() bool { - _, err := exec.LookPath("openapi-python-client") - return err == nil -} - -// Install returns installation instructions. -func (g *PythonGenerator) Install() string { - return "pip install openapi-python-client" -} - -// Generate creates Python SDK from OpenAPI spec. 
-func (g *PythonGenerator) Generate(ctx context.Context, opts Options) error { - if err := os.MkdirAll(opts.OutputDir, 0755); err != nil { - return fmt.Errorf("python.Generate: failed to create output dir: %w", err) - } - - if g.Available() { - return g.generateNative(ctx, opts) - } - return g.generateDocker(ctx, opts) -} - -func (g *PythonGenerator) generateNative(ctx context.Context, opts Options) error { - // openapi-python-client creates a directory named after the package - // We need to generate into a temp location then move - parentDir := filepath.Dir(opts.OutputDir) - - cmd := exec.CommandContext(ctx, "openapi-python-client", "generate", - "--path", opts.SpecPath, - "--output-path", opts.OutputDir, - ) - cmd.Dir = parentDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - return cmd.Run() -} - -func (g *PythonGenerator) generateDocker(ctx context.Context, opts Options) error { - specDir := filepath.Dir(opts.SpecPath) - specName := filepath.Base(opts.SpecPath) - - cmd := exec.CommandContext(ctx, "docker", "run", "--rm", - "-v", specDir+":/spec", - "-v", opts.OutputDir+":/out", - "openapitools/openapi-generator-cli", "generate", - "-i", "/spec/"+specName, - "-g", "python", - "-o", "/out", - "--additional-properties=packageName="+opts.PackageName, - ) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - return cmd.Run() -} -``` - -**Step 4: Run tests** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/sdk/generators/... -run TestPythonGenerator -v` -Expected: PASS (or skip) - -**Step 5: Commit** - -```bash -git add pkg/sdk/generators/python.go pkg/sdk/generators/python_test.go -git commit -m "feat(sdk): add Python generator - -Uses openapi-python-client with Docker fallback. -Generates Python client from OpenAPI spec. 
- -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 6: Implement Go Generator - -**Files:** -- Create: `pkg/sdk/generators/go.go` -- Create: `pkg/sdk/generators/go_test.go` - -**Step 1: Write the failing test** - -```go -package generators - -import ( - "context" - "os" - "path/filepath" - "testing" -) - -func TestGoGenerator_Good_Available(t *testing.T) { - g := NewGoGenerator() - _ = g.Available() - _ = g.Language() - _ = g.Install() -} - -func TestGoGenerator_Good_Generate(t *testing.T) { - g := NewGoGenerator() - if !g.Available() && !dockerAvailable() { - t.Skip("no Go generator available") - } - - tmpDir := t.TempDir() - specPath := filepath.Join(tmpDir, "spec.yaml") - spec := `openapi: "3.0.0" -info: - title: Test API - version: "1.0.0" -paths: - /health: - get: - operationId: getHealth - responses: - "200": - description: OK -` - os.WriteFile(specPath, []byte(spec), 0644) - - outputDir := filepath.Join(tmpDir, "sdk", "go") - err := g.Generate(context.Background(), Options{ - SpecPath: specPath, - OutputDir: outputDir, - PackageName: "testapi", - Version: "1.0.0", - }) - if err != nil { - t.Fatalf("Generate failed: %v", err) - } - - if _, err := os.Stat(outputDir); os.IsNotExist(err) { - t.Error("output directory not created") - } -} -``` - -**Step 2: Run test to verify it fails** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/sdk/generators/... -run TestGoGenerator -v` -Expected: FAIL - -**Step 3: Write implementation** - -```go -package generators - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" -) - -// GoGenerator generates Go SDKs using oapi-codegen. -type GoGenerator struct{} - -// NewGoGenerator creates a new Go generator. -func NewGoGenerator() *GoGenerator { - return &GoGenerator{} -} - -// Language returns "go". -func (g *GoGenerator) Language() string { - return "go" -} - -// Available checks if oapi-codegen is installed. 
-func (g *GoGenerator) Available() bool { - _, err := exec.LookPath("oapi-codegen") - return err == nil -} - -// Install returns installation instructions. -func (g *GoGenerator) Install() string { - return "go install github.com/oapi-codegen/oapi-codegen/v2/cmd/oapi-codegen@latest" -} - -// Generate creates Go SDK from OpenAPI spec. -func (g *GoGenerator) Generate(ctx context.Context, opts Options) error { - if err := os.MkdirAll(opts.OutputDir, 0755); err != nil { - return fmt.Errorf("go.Generate: failed to create output dir: %w", err) - } - - if g.Available() { - return g.generateNative(ctx, opts) - } - return g.generateDocker(ctx, opts) -} - -func (g *GoGenerator) generateNative(ctx context.Context, opts Options) error { - outputFile := filepath.Join(opts.OutputDir, "client.go") - - cmd := exec.CommandContext(ctx, "oapi-codegen", - "-package", opts.PackageName, - "-generate", "types,client", - "-o", outputFile, - opts.SpecPath, - ) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - if err := cmd.Run(); err != nil { - return fmt.Errorf("go.generateNative: %w", err) - } - - // Create go.mod - goMod := fmt.Sprintf("module %s\n\ngo 1.21\n", opts.PackageName) - return os.WriteFile(filepath.Join(opts.OutputDir, "go.mod"), []byte(goMod), 0644) -} - -func (g *GoGenerator) generateDocker(ctx context.Context, opts Options) error { - specDir := filepath.Dir(opts.SpecPath) - specName := filepath.Base(opts.SpecPath) - - cmd := exec.CommandContext(ctx, "docker", "run", "--rm", - "-v", specDir+":/spec", - "-v", opts.OutputDir+":/out", - "openapitools/openapi-generator-cli", "generate", - "-i", "/spec/"+specName, - "-g", "go", - "-o", "/out", - "--additional-properties=packageName="+opts.PackageName, - ) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - return cmd.Run() -} -``` - -**Step 4: Run tests** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/sdk/generators/... 
-run TestGoGenerator -v` -Expected: PASS (or skip) - -**Step 5: Commit** - -```bash -git add pkg/sdk/generators/go.go pkg/sdk/generators/go_test.go -git commit -m "feat(sdk): add Go generator - -Uses oapi-codegen with Docker fallback. -Generates Go client and types from OpenAPI spec. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 7: Implement PHP Generator - -**Files:** -- Create: `pkg/sdk/generators/php.go` -- Create: `pkg/sdk/generators/php_test.go` - -**Step 1: Write the failing test** - -```go -package generators - -import ( - "context" - "os" - "path/filepath" - "testing" -) - -func TestPHPGenerator_Good_Available(t *testing.T) { - g := NewPHPGenerator() - _ = g.Available() - _ = g.Language() - _ = g.Install() -} - -func TestPHPGenerator_Good_Generate(t *testing.T) { - g := NewPHPGenerator() - if !g.Available() { - t.Skip("Docker not available for PHP generator") - } - - tmpDir := t.TempDir() - specPath := filepath.Join(tmpDir, "spec.yaml") - spec := `openapi: "3.0.0" -info: - title: Test API - version: "1.0.0" -paths: - /health: - get: - operationId: getHealth - responses: - "200": - description: OK -` - os.WriteFile(specPath, []byte(spec), 0644) - - outputDir := filepath.Join(tmpDir, "sdk", "php") - err := g.Generate(context.Background(), Options{ - SpecPath: specPath, - OutputDir: outputDir, - PackageName: "TestApi", - Version: "1.0.0", - }) - if err != nil { - t.Fatalf("Generate failed: %v", err) - } - - if _, err := os.Stat(outputDir); os.IsNotExist(err) { - t.Error("output directory not created") - } -} -``` - -**Step 2: Run test to verify it fails** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/sdk/generators/... -run TestPHPGenerator -v` -Expected: FAIL - -**Step 3: Write implementation** - -```go -package generators - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" -) - -// PHPGenerator generates PHP SDKs using openapi-generator (Docker). 
-type PHPGenerator struct{} - -// NewPHPGenerator creates a new PHP generator. -func NewPHPGenerator() *PHPGenerator { - return &PHPGenerator{} -} - -// Language returns "php". -func (g *PHPGenerator) Language() string { - return "php" -} - -// Available checks if Docker is available. -func (g *PHPGenerator) Available() bool { - _, err := exec.LookPath("docker") - return err == nil -} - -// Install returns installation instructions. -func (g *PHPGenerator) Install() string { - return "Docker is required for PHP SDK generation" -} - -// Generate creates PHP SDK from OpenAPI spec using Docker. -func (g *PHPGenerator) Generate(ctx context.Context, opts Options) error { - if !g.Available() { - return fmt.Errorf("php.Generate: Docker is required but not available") - } - - if err := os.MkdirAll(opts.OutputDir, 0755); err != nil { - return fmt.Errorf("php.Generate: failed to create output dir: %w", err) - } - - specDir := filepath.Dir(opts.SpecPath) - specName := filepath.Base(opts.SpecPath) - - cmd := exec.CommandContext(ctx, "docker", "run", "--rm", - "-v", specDir+":/spec", - "-v", opts.OutputDir+":/out", - "openapitools/openapi-generator-cli", "generate", - "-i", "/spec/"+specName, - "-g", "php", - "-o", "/out", - "--additional-properties=invokerPackage="+opts.PackageName, - ) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - if err := cmd.Run(); err != nil { - return fmt.Errorf("php.Generate: %w", err) - } - return nil -} -``` - -**Step 4: Run tests** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/sdk/generators/... -run TestPHPGenerator -v` -Expected: PASS (or skip) - -**Step 5: Commit** - -```bash -git add pkg/sdk/generators/php.go pkg/sdk/generators/php_test.go -git commit -m "feat(sdk): add PHP generator - -Uses openapi-generator via Docker. -Generates PHP client from OpenAPI spec. 
- -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 8: Implement Breaking Change Detection - -**Files:** -- Create: `pkg/sdk/diff.go` -- Create: `pkg/sdk/diff_test.go` - -**Step 1: Write the failing test** - -```go -package sdk - -import ( - "os" - "path/filepath" - "testing" -) - -func TestDiff_Good_NoBreaking(t *testing.T) { - tmpDir := t.TempDir() - - baseSpec := `openapi: "3.0.0" -info: - title: Test API - version: "1.0.0" -paths: - /health: - get: - operationId: getHealth - responses: - "200": - description: OK -` - revSpec := `openapi: "3.0.0" -info: - title: Test API - version: "1.1.0" -paths: - /health: - get: - operationId: getHealth - responses: - "200": - description: OK - /status: - get: - operationId: getStatus - responses: - "200": - description: OK -` - basePath := filepath.Join(tmpDir, "base.yaml") - revPath := filepath.Join(tmpDir, "rev.yaml") - os.WriteFile(basePath, []byte(baseSpec), 0644) - os.WriteFile(revPath, []byte(revSpec), 0644) - - result, err := Diff(basePath, revPath) - if err != nil { - t.Fatalf("Diff failed: %v", err) - } - if result.Breaking { - t.Error("expected no breaking changes for adding endpoint") - } -} - -func TestDiff_Good_Breaking(t *testing.T) { - tmpDir := t.TempDir() - - baseSpec := `openapi: "3.0.0" -info: - title: Test API - version: "1.0.0" -paths: - /health: - get: - operationId: getHealth - responses: - "200": - description: OK - /users: - get: - operationId: getUsers - responses: - "200": - description: OK -` - revSpec := `openapi: "3.0.0" -info: - title: Test API - version: "2.0.0" -paths: - /health: - get: - operationId: getHealth - responses: - "200": - description: OK -` - basePath := filepath.Join(tmpDir, "base.yaml") - revPath := filepath.Join(tmpDir, "rev.yaml") - os.WriteFile(basePath, []byte(baseSpec), 0644) - os.WriteFile(revPath, []byte(revSpec), 0644) - - result, err := Diff(basePath, revPath) - if err != nil { - t.Fatalf("Diff failed: %v", err) - } - if !result.Breaking { - t.Error("expected 
breaking change for removed endpoint") - } -} -``` - -**Step 2: Run test to verify it fails** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/sdk/... -run TestDiff -v` -Expected: FAIL (Diff not defined) - -**Step 3: Add oasdiff dependency** - -Run: `cd /Users/snider/Code/Core/pkg/sdk && go get github.com/tufin/oasdiff@latest github.com/getkin/kin-openapi@latest` - -**Step 4: Write implementation** - -```go -package sdk - -import ( - "fmt" - - "github.com/getkin/kin-openapi/openapi3" - "github.com/tufin/oasdiff/checker" - "github.com/tufin/oasdiff/diff" - "github.com/tufin/oasdiff/load" -) - -// DiffResult holds the result of comparing two OpenAPI specs. -type DiffResult struct { - // Breaking is true if breaking changes were detected. - Breaking bool - // Changes is the list of breaking changes. - Changes []string - // Summary is a human-readable summary. - Summary string -} - -// Diff compares two OpenAPI specs and detects breaking changes. -func Diff(basePath, revisionPath string) (*DiffResult, error) { - loader := openapi3.NewLoader() - loader.IsExternalRefsAllowed = true - - // Load specs - baseSpec, err := load.NewSpecInfo(loader, load.NewSource(basePath)) - if err != nil { - return nil, fmt.Errorf("sdk.Diff: failed to load base spec: %w", err) - } - - revSpec, err := load.NewSpecInfo(loader, load.NewSource(revisionPath)) - if err != nil { - return nil, fmt.Errorf("sdk.Diff: failed to load revision spec: %w", err) - } - - // Compute diff - diffResult, err := diff.Get(diff.NewConfig(), baseSpec.Spec, revSpec.Spec) - if err != nil { - return nil, fmt.Errorf("sdk.Diff: failed to compute diff: %w", err) - } - - // Check for breaking changes - config := checker.GetAllChecks() - breaks := checker.CheckBackwardCompatibilityUntilLevel( - config, - diffResult, - baseSpec.Spec, - revSpec.Spec, - checker.ERR, // Only errors (breaking changes) - ) - - // Build result - result := &DiffResult{ - Breaking: len(breaks) > 0, - Changes: make([]string, 0, len(breaks)), - } 
- - for _, b := range breaks { - result.Changes = append(result.Changes, b.GetUncolorizedText(checker.NewDefaultLocalizer())) - } - - if result.Breaking { - result.Summary = fmt.Sprintf("%d breaking change(s) detected", len(breaks)) - } else { - result.Summary = "No breaking changes" - } - - return result, nil -} - -// DiffExitCode returns the exit code for CI integration. -// 0 = no breaking changes, 1 = breaking changes, 2 = error -func DiffExitCode(result *DiffResult, err error) int { - if err != nil { - return 2 - } - if result.Breaking { - return 1 - } - return 0 -} -``` - -**Step 5: Run tests** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/sdk/... -run TestDiff -v` -Expected: PASS - -**Step 6: Commit** - -```bash -git add pkg/sdk/diff.go pkg/sdk/diff_test.go pkg/sdk/go.mod pkg/sdk/go.sum -git commit -m "feat(sdk): add breaking change detection with oasdiff - -Compares OpenAPI specs to detect breaking changes: -- Removed endpoints -- Changed required parameters -- Modified response schemas - -Returns CI-friendly exit codes (0=ok, 1=breaking, 2=error). - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 9: Wire Up Generate Command - -**Files:** -- Modify: `pkg/sdk/sdk.go` - -**Step 1: Update SDK.Generate to use generators** - -```go -// Add to sdk.go, replacing the stub Generate method - -import ( - "github.com/host-uk/core/pkg/sdk/generators" -) - -// Generate generates SDKs for all configured languages. 
-func (s *SDK) Generate(ctx context.Context) error { - // Detect spec - specPath, err := s.DetectSpec() - if err != nil { - return err - } - - // Create registry with all generators - registry := generators.NewRegistry() - registry.Register(generators.NewTypeScriptGenerator()) - registry.Register(generators.NewPythonGenerator()) - registry.Register(generators.NewGoGenerator()) - registry.Register(generators.NewPHPGenerator()) - - // Generate for each language - for _, lang := range s.config.Languages { - if err := s.GenerateLanguage(ctx, lang); err != nil { - return err - } - } - - return nil -} - -// GenerateLanguage generates SDK for a specific language. -func (s *SDK) GenerateLanguage(ctx context.Context, lang string) error { - specPath, err := s.DetectSpec() - if err != nil { - return err - } - - registry := generators.NewRegistry() - registry.Register(generators.NewTypeScriptGenerator()) - registry.Register(generators.NewPythonGenerator()) - registry.Register(generators.NewGoGenerator()) - registry.Register(generators.NewPHPGenerator()) - - gen, ok := registry.Get(lang) - if !ok { - return fmt.Errorf("sdk.GenerateLanguage: unknown language: %s", lang) - } - - if !gen.Available() { - fmt.Printf("Warning: %s generator not available. 
Install with: %s\n", lang, gen.Install()) - fmt.Printf("Falling back to Docker...\n") - } - - outputDir := filepath.Join(s.projectDir, s.config.Output, lang) - opts := generators.Options{ - SpecPath: specPath, - OutputDir: outputDir, - PackageName: s.config.Package.Name, - Version: s.config.Package.Version, - } - - fmt.Printf("Generating %s SDK...\n", lang) - if err := gen.Generate(ctx, opts); err != nil { - return fmt.Errorf("sdk.GenerateLanguage: %s generation failed: %w", lang, err) - } - fmt.Printf("Generated %s SDK at %s\n", lang, outputDir) - - return nil -} -``` - -**Step 2: Verify it compiles** - -Run: `cd /Users/snider/Code/Core && go build ./pkg/sdk/...` -Expected: No errors - -**Step 3: Commit** - -```bash -git add pkg/sdk/sdk.go -git commit -m "feat(sdk): wire up Generate to use all generators - -SDK.Generate() and SDK.GenerateLanguage() now use the -generator registry to generate SDKs for configured languages. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 10: Add CLI Commands - -**Files:** -- Create: `cmd/core/cmd/sdk.go` - -**Step 1: Create SDK command file** - -```go -package cmd - -import ( - "context" - "fmt" - "os" - - "github.com/charmbracelet/lipgloss" - "github.com/host-uk/core/pkg/sdk" - "github.com/leaanthony/clir" -) - -var ( - sdkHeaderStyle = lipgloss.NewStyle(). - Bold(true). - Foreground(lipgloss.Color("#3b82f6")) - - sdkSuccessStyle = lipgloss.NewStyle(). - Bold(true). - Foreground(lipgloss.Color("#22c55e")) - - sdkErrorStyle = lipgloss.NewStyle(). - Bold(true). - Foreground(lipgloss.Color("#ef4444")) - - sdkDimStyle = lipgloss.NewStyle(). - Foreground(lipgloss.Color("#6b7280")) -) - -// AddSDKCommand adds the sdk command and its subcommands. 
-func AddSDKCommand(app *clir.Cli) { - sdkCmd := app.NewSubCommand("sdk", "Generate and manage API SDKs") - sdkCmd.LongDescription("Generate typed API clients from OpenAPI specs.\n" + - "Supports TypeScript, Python, Go, and PHP.") - - // sdk generate - genCmd := sdkCmd.NewSubCommand("generate", "Generate SDKs from OpenAPI spec") - var specPath, lang string - genCmd.StringFlag("spec", "Path to OpenAPI spec file", &specPath) - genCmd.StringFlag("lang", "Generate only this language", &lang) - genCmd.Action(func() error { - return runSDKGenerate(specPath, lang) - }) - - // sdk diff - diffCmd := sdkCmd.NewSubCommand("diff", "Check for breaking API changes") - var basePath string - diffCmd.StringFlag("base", "Base spec (version tag or file)", &basePath) - diffCmd.StringFlag("spec", "Current spec file", &specPath) - diffCmd.Action(func() error { - return runSDKDiff(basePath, specPath) - }) - - // sdk validate - validateCmd := sdkCmd.NewSubCommand("validate", "Validate OpenAPI spec") - validateCmd.StringFlag("spec", "Path to OpenAPI spec file", &specPath) - validateCmd.Action(func() error { - return runSDKValidate(specPath) - }) -} - -func runSDKGenerate(specPath, lang string) error { - ctx := context.Background() - - projectDir, err := os.Getwd() - if err != nil { - return fmt.Errorf("failed to get working directory: %w", err) - } - - // Load config - config := sdk.DefaultConfig() - if specPath != "" { - config.Spec = specPath - } - - s := sdk.New(projectDir, config) - - fmt.Printf("%s Generating SDKs\n", sdkHeaderStyle.Render("SDK:")) - - if lang != "" { - // Generate single language - if err := s.GenerateLanguage(ctx, lang); err != nil { - fmt.Printf("%s %v\n", sdkErrorStyle.Render("Error:"), err) - return err - } - } else { - // Generate all - if err := s.Generate(ctx); err != nil { - fmt.Printf("%s %v\n", sdkErrorStyle.Render("Error:"), err) - return err - } - } - - fmt.Printf("%s SDK generation complete\n", sdkSuccessStyle.Render("Success:")) - return nil -} - -func 
runSDKDiff(basePath, specPath string) error { - projectDir, err := os.Getwd() - if err != nil { - return fmt.Errorf("failed to get working directory: %w", err) - } - - // Detect current spec if not provided - if specPath == "" { - s := sdk.New(projectDir, nil) - specPath, err = s.DetectSpec() - if err != nil { - return err - } - } - - if basePath == "" { - return fmt.Errorf("--base is required (version tag or file path)") - } - - fmt.Printf("%s Checking for breaking changes\n", sdkHeaderStyle.Render("SDK Diff:")) - fmt.Printf(" Base: %s\n", sdkDimStyle.Render(basePath)) - fmt.Printf(" Current: %s\n", sdkDimStyle.Render(specPath)) - fmt.Println() - - result, err := sdk.Diff(basePath, specPath) - if err != nil { - fmt.Printf("%s %v\n", sdkErrorStyle.Render("Error:"), err) - os.Exit(2) - } - - if result.Breaking { - fmt.Printf("%s %s\n", sdkErrorStyle.Render("Breaking:"), result.Summary) - for _, change := range result.Changes { - fmt.Printf(" - %s\n", change) - } - os.Exit(1) - } - - fmt.Printf("%s %s\n", sdkSuccessStyle.Render("OK:"), result.Summary) - return nil -} - -func runSDKValidate(specPath string) error { - projectDir, err := os.Getwd() - if err != nil { - return fmt.Errorf("failed to get working directory: %w", err) - } - - s := sdk.New(projectDir, &sdk.Config{Spec: specPath}) - - fmt.Printf("%s Validating OpenAPI spec\n", sdkHeaderStyle.Render("SDK:")) - - detectedPath, err := s.DetectSpec() - if err != nil { - fmt.Printf("%s %v\n", sdkErrorStyle.Render("Error:"), err) - return err - } - - fmt.Printf(" Spec: %s\n", sdkDimStyle.Render(detectedPath)) - fmt.Printf("%s Spec is valid\n", sdkSuccessStyle.Render("OK:")) - return nil -} -``` - -**Step 2: Register command in root.go** - -Add to root.go after other command registrations: -```go -AddSDKCommand(app) -``` - -**Step 3: Verify it compiles** - -Run: `cd /Users/snider/Code/Core && go build ./cmd/core/...` -Expected: No errors - -**Step 4: Commit** - -```bash -git add cmd/core/cmd/sdk.go 
cmd/core/cmd/root.go -git commit -m "feat(cli): add sdk command with generate, diff, validate - -Commands: -- core sdk generate [--spec FILE] [--lang LANG] -- core sdk diff --base VERSION [--spec FILE] -- core sdk validate [--spec FILE] - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 11: Add SDK Config to Release Config - -**Files:** -- Modify: `pkg/release/config.go` - -**Step 1: Add SDK field to Config** - -Add to Config struct in config.go: -```go -// SDK configures SDK generation. -SDK *SDKConfig `yaml:"sdk,omitempty"` -``` - -Add SDKConfig type: -```go -// SDKConfig holds SDK generation configuration. -type SDKConfig struct { - // Spec is the path to the OpenAPI spec file. - Spec string `yaml:"spec,omitempty"` - // Languages to generate. - Languages []string `yaml:"languages,omitempty"` - // Output directory (default: sdk/). - Output string `yaml:"output,omitempty"` - // Package naming. - Package SDKPackageConfig `yaml:"package,omitempty"` - // Diff configuration. - Diff SDKDiffConfig `yaml:"diff,omitempty"` - // Publish configuration. - Publish SDKPublishConfig `yaml:"publish,omitempty"` -} - -// SDKPackageConfig holds package naming configuration. -type SDKPackageConfig struct { - Name string `yaml:"name,omitempty"` - Version string `yaml:"version,omitempty"` -} - -// SDKDiffConfig holds diff configuration. -type SDKDiffConfig struct { - Enabled bool `yaml:"enabled,omitempty"` - FailOnBreaking bool `yaml:"fail_on_breaking,omitempty"` -} - -// SDKPublishConfig holds monorepo publish configuration. 
-type SDKPublishConfig struct { - Repo string `yaml:"repo,omitempty"` - Path string `yaml:"path,omitempty"` -} -``` - -**Step 2: Verify it compiles** - -Run: `cd /Users/snider/Code/Core && go build ./pkg/release/...` -Expected: No errors - -**Step 3: Commit** - -```bash -git add pkg/release/config.go -git commit -m "feat(release): add SDK configuration to release.yaml - -Adds sdk: section to .core/release.yaml for configuring -OpenAPI SDK generation during releases. - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -### Task 12: Add SDK Example to Docs - -**Files:** -- Create: `docs/examples/sdk-full.yaml` - -**Step 1: Create example file** - -```yaml -# Example: Full SDK Configuration -# Generate typed API clients from OpenAPI specs - -sdk: - # OpenAPI spec source (auto-detected if omitted) - spec: api/openapi.yaml - - # Languages to generate - languages: - - typescript - - python - - go - - php - - # Output directory (default: sdk/) - output: sdk/ - - # Package naming - package: - name: myapi - version: "{{.Version}}" - - # Breaking change detection - diff: - enabled: true - fail_on_breaking: true # CI fails on breaking changes - - # Optional: publish to monorepo - publish: - repo: myorg/sdks - path: packages/myapi - -# Required tools (install one per language): -# TypeScript: npm i -g openapi-typescript-codegen (or Docker) -# Python: pip install openapi-python-client (or Docker) -# Go: go install github.com/oapi-codegen/oapi-codegen/v2/cmd/oapi-codegen@latest -# PHP: Docker required -# -# Usage: -# core sdk generate # Generate all configured languages -# core sdk generate --lang go # Generate single language -# core sdk diff --base v1.0.0 # Check for breaking changes -# core sdk validate # Validate spec -``` - -**Step 2: Commit** - -```bash -git add docs/examples/sdk-full.yaml -git commit -m "docs: add SDK configuration example - -Shows full SDK config with all options: -- Language selection -- Breaking change detection -- Monorepo publishing - -Co-Authored-By: 
Claude Opus 4.5 " -``` - ---- - -### Task 13: Final Integration Test - -**Step 1: Build and verify CLI** - -Run: `cd /Users/snider/Code/Core && go build -o bin/core ./cmd/core && ./bin/core sdk --help` -Expected: Shows sdk command help - -**Step 2: Run all tests** - -Run: `cd /Users/snider/Code/Core && go test ./pkg/sdk/... -v` -Expected: All tests pass - -**Step 3: Final commit if needed** - -```bash -git add -A -git commit -m "chore(sdk): finalize S3.4 SDK generation - -All SDK generation features complete: -- OpenAPI spec detection -- TypeScript, Python, Go, PHP generators -- Breaking change detection with oasdiff -- CLI commands (generate, diff, validate) -- Integration with release config - -Co-Authored-By: Claude Opus 4.5 " -``` - ---- - -## Summary - -13 tasks covering: -1. Package structure -2. Spec detection -3. Generator interface -4. TypeScript generator -5. Python generator -6. Go generator -7. PHP generator -8. Breaking change detection -9. Wire up Generate -10. CLI commands -11. Release config integration -12. Documentation example -13. Integration test diff --git a/tasks/plans/2026-01-29-sdk-release-design.md b/tasks/plans/2026-01-29-sdk-release-design.md deleted file mode 100644 index 37a26ae..0000000 --- a/tasks/plans/2026-01-29-sdk-release-design.md +++ /dev/null @@ -1,210 +0,0 @@ -# SDK Release Integration Design (S3.4) - -## Summary - -Add `core release --target sdk` to generate SDKs as a separate release target. Runs breaking change detection before generating, uses release version for SDK versioning, outputs locally for manual publishing. 
- -## Design Decisions - -- **Separate target**: `--target sdk` runs ONLY SDK generation (no binary builds) -- **Local output**: Generates to `sdk/` directory, user handles publishing -- **Diff first**: Run breaking change detection before generating -- **Match version**: SDK version matches release version from git tags - -## CLI - -```bash -core release --target sdk # Generate SDKs only -core release --target sdk --version v1.2.3 # Explicit version -core release --target sdk --dry-run # Preview what would generate -core release # Normal release (unchanged) -``` - -## Config Schema - -In `.core/release.yaml`: - -```yaml -sdk: - spec: openapi.yaml # or auto-detect - languages: [typescript, python, go, php] - output: sdk # output directory - package: - name: myapi-sdk - diff: - enabled: true - fail_on_breaking: false # warn but continue -``` - -## Flow - -``` -core release --target sdk - ↓ -1. Load release config (.core/release.yaml) - ↓ -2. Check sdk config exists (error if not configured) - ↓ -3. Determine version (git tag or --version flag) - ↓ -4. If diff.enabled: - - Get previous tag - - Run oasdiff against current spec - - If breaking && fail_on_breaking: abort - - If breaking && !fail_on_breaking: warn, continue - ↓ -5. Generate SDKs for each language - - Pass version to generators - - Output to sdk/{language}/ - ↓ -6. Print summary (languages generated, output paths) -``` - -## Package Structure - -``` -pkg/release/ -├── sdk.go # RunSDK() orchestration + diff helper ← NEW -├── release.go # Existing Run() unchanged -└── config.go # Existing SDKConfig unchanged - -pkg/sdk/ -└── sdk.go # Add SetVersion() method ← MODIFY - -cmd/core/cmd/ -└── release.go # Add --target flag ← MODIFY -``` - -## RunSDK Implementation - -```go -// pkg/release/sdk.go - -// RunSDK executes SDK-only release: diff check + generate. 
-func RunSDK(ctx context.Context, cfg *Config, dryRun bool) (*SDKRelease, error) { - if cfg.SDK == nil { - return nil, fmt.Errorf("sdk not configured in .core/release.yaml") - } - - projectDir := cfg.projectDir - if projectDir == "" { - projectDir = "." - } - - // Determine version - version := cfg.version - if version == "" { - var err error - version, err = DetermineVersion(projectDir) - if err != nil { - return nil, fmt.Errorf("failed to determine version: %w", err) - } - } - - // Run diff check if enabled - if cfg.SDK.Diff.Enabled { - breaking, err := checkBreakingChanges(projectDir, cfg.SDK) - if err != nil { - // Non-fatal: warn and continue - fmt.Printf("Warning: diff check failed: %v\n", err) - } else if breaking { - if cfg.SDK.Diff.FailOnBreaking { - return nil, fmt.Errorf("breaking API changes detected") - } - fmt.Printf("Warning: breaking API changes detected\n") - } - } - - if dryRun { - return &SDKRelease{ - Version: version, - Languages: cfg.SDK.Languages, - Output: cfg.SDK.Output, - }, nil - } - - // Generate SDKs - sdkCfg := toSDKConfig(cfg.SDK) - s := sdk.New(projectDir, sdkCfg) - s.SetVersion(version) - - if err := s.Generate(ctx); err != nil { - return nil, fmt.Errorf("sdk generation failed: %w", err) - } - - return &SDKRelease{ - Version: version, - Languages: cfg.SDK.Languages, - Output: cfg.SDK.Output, - }, nil -} - -// SDKRelease holds the result of an SDK release. 
-type SDKRelease struct { - Version string - Languages []string - Output string -} -``` - -## CLI Integration - -```go -// cmd/core/cmd/release.go - -var target string -releaseCmd.StringFlag("target", "Release target (sdk)", &target) - -releaseCmd.Action(func() error { - if target == "sdk" { - return runReleaseSDK(dryRun, version) - } - return runRelease(dryRun, version, draft, prerelease) -}) - -func runReleaseSDK(dryRun bool, version string) error { - ctx := context.Background() - projectDir, _ := os.Getwd() - - cfg, err := release.LoadConfig(projectDir) - if err != nil { - return err - } - - if version != "" { - cfg.SetVersion(version) - } - - fmt.Printf("%s Generating SDKs\n", releaseHeaderStyle.Render("SDK Release:")) - if dryRun { - fmt.Printf(" %s\n", releaseDimStyle.Render("(dry-run mode)")) - } - - result, err := release.RunSDK(ctx, cfg, dryRun) - if err != nil { - fmt.Printf("%s %v\n", releaseErrorStyle.Render("Error:"), err) - return err - } - - fmt.Printf("%s SDK generation complete\n", releaseSuccessStyle.Render("Success:")) - fmt.Printf(" Version: %s\n", result.Version) - fmt.Printf(" Languages: %v\n", result.Languages) - fmt.Printf(" Output: %s/\n", result.Output) - - return nil -} -``` - -## Implementation Steps - -1. Add `SetVersion()` method to `pkg/sdk/sdk.go` -2. Create `pkg/release/sdk.go` with `RunSDK()` and helpers -3. Add `--target` flag to `cmd/core/cmd/release.go` -4. Add `runReleaseSDK()` function to CLI -5. Add tests for `pkg/release/sdk_test.go` -6. Final verification and TODO update - -## Dependencies - -- `oasdiff` CLI (for breaking change detection) -- Existing SDK generators (openapi-generator, etc.) 
diff --git a/tasks/plans/2026-01-29-sdk-release-impl.md b/tasks/plans/2026-01-29-sdk-release-impl.md deleted file mode 100644 index 78d26e1..0000000 --- a/tasks/plans/2026-01-29-sdk-release-impl.md +++ /dev/null @@ -1,576 +0,0 @@ -# SDK Release Implementation Plan (S3.4) - -> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. - -**Goal:** Add `core release --target sdk` to generate SDKs with version and diff checking - -**Architecture:** Separate release target that runs diff check then SDK generation, outputs locally - -**Tech Stack:** Go, existing pkg/sdk generators, oasdiff for diff - ---- - -## Task 1: Add SetVersion to SDK struct - -**Files:** -- Modify: `pkg/sdk/sdk.go` -- Test: `pkg/sdk/sdk_test.go` (create if needed) - -**Step 1: Write the failing test** - -```go -// pkg/sdk/sdk_test.go -package sdk - -import ( - "testing" -) - -func TestSDK_Good_SetVersion(t *testing.T) { - s := New("/tmp", nil) - s.SetVersion("v1.2.3") - - if s.version != "v1.2.3" { - t.Errorf("expected version v1.2.3, got %s", s.version) - } -} - -func TestSDK_Good_VersionPassedToGenerator(t *testing.T) { - config := &Config{ - Languages: []string{"typescript"}, - Output: "sdk", - Package: PackageConfig{ - Name: "test-sdk", - }, - } - s := New("/tmp", config) - s.SetVersion("v2.0.0") - - // Version should override config - if s.config.Package.Version != "v2.0.0" { - t.Errorf("expected config version v2.0.0, got %s", s.config.Package.Version) - } -} -``` - -**Step 2: Run test to verify it fails** - -Run: `go test ./pkg/sdk/... -run TestSDK_Good_SetVersion -v` -Expected: FAIL with "s.version undefined" or similar - -**Step 3: Write minimal implementation** - -Add to `pkg/sdk/sdk.go`: - -```go -// SDK struct - add version field -type SDK struct { - config *Config - projectDir string - version string // ADD THIS -} - -// SetVersion sets the SDK version, overriding config. 
-func (s *SDK) SetVersion(version string) { - s.version = version - if s.config != nil { - s.config.Package.Version = version - } -} -``` - -**Step 4: Run test to verify it passes** - -Run: `go test ./pkg/sdk/... -run TestSDK_Good -v` -Expected: PASS - -**Step 5: Commit** - -```bash -git add pkg/sdk/sdk.go pkg/sdk/sdk_test.go -git commit -m "feat(sdk): add SetVersion method for release integration" -``` - ---- - -## Task 2: Create pkg/release/sdk.go structure - -**Files:** -- Create: `pkg/release/sdk.go` - -**Step 1: Create file with types and helper** - -```go -// pkg/release/sdk.go -package release - -import ( - "context" - "fmt" - - "github.com/host-uk/core/pkg/sdk" -) - -// SDKRelease holds the result of an SDK release. -type SDKRelease struct { - // Version is the SDK version. - Version string - // Languages that were generated. - Languages []string - // Output directory. - Output string -} - -// toSDKConfig converts release.SDKConfig to sdk.Config. -func toSDKConfig(cfg *SDKConfig) *sdk.Config { - if cfg == nil { - return nil - } - return &sdk.Config{ - Spec: cfg.Spec, - Languages: cfg.Languages, - Output: cfg.Output, - Package: sdk.PackageConfig{ - Name: cfg.Package.Name, - Version: cfg.Package.Version, - }, - Diff: sdk.DiffConfig{ - Enabled: cfg.Diff.Enabled, - FailOnBreaking: cfg.Diff.FailOnBreaking, - }, - } -} -``` - -**Step 2: Verify it compiles** - -Run: `go build ./pkg/release/...` -Expected: Success - -**Step 3: Commit** - -```bash -git add pkg/release/sdk.go -git commit -m "feat(release): add SDK release types and config converter" -``` - ---- - -## Task 3: Implement RunSDK function - -**Files:** -- Modify: `pkg/release/sdk.go` -- Test: `pkg/release/sdk_test.go` - -**Step 1: Write the failing test** - -```go -// pkg/release/sdk_test.go -package release - -import ( - "context" - "testing" -) - -func TestRunSDK_Bad_NoConfig(t *testing.T) { - cfg := &Config{ - SDK: nil, - } - cfg.projectDir = "/tmp" - - _, err := RunSDK(context.Background(), cfg, true) 
- if err == nil { - t.Error("expected error when SDK config is nil") - } -} - -func TestRunSDK_Good_DryRun(t *testing.T) { - cfg := &Config{ - SDK: &SDKConfig{ - Languages: []string{"typescript", "python"}, - Output: "sdk", - }, - } - cfg.projectDir = "/tmp" - cfg.version = "v1.0.0" - - result, err := RunSDK(context.Background(), cfg, true) - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - - if result.Version != "v1.0.0" { - t.Errorf("expected version v1.0.0, got %s", result.Version) - } - if len(result.Languages) != 2 { - t.Errorf("expected 2 languages, got %d", len(result.Languages)) - } -} -``` - -**Step 2: Run test to verify it fails** - -Run: `go test ./pkg/release/... -run TestRunSDK -v` -Expected: FAIL with "RunSDK undefined" - -**Step 3: Write implementation** - -Add to `pkg/release/sdk.go`: - -```go -// RunSDK executes SDK-only release: diff check + generate. -// If dryRun is true, it shows what would be done without generating. -func RunSDK(ctx context.Context, cfg *Config, dryRun bool) (*SDKRelease, error) { - if cfg == nil { - return nil, fmt.Errorf("release.RunSDK: config is nil") - } - if cfg.SDK == nil { - return nil, fmt.Errorf("release.RunSDK: sdk not configured in .core/release.yaml") - } - - projectDir := cfg.projectDir - if projectDir == "" { - projectDir = "." 
- } - - // Determine version - version := cfg.version - if version == "" { - var err error - version, err = DetermineVersion(projectDir) - if err != nil { - return nil, fmt.Errorf("release.RunSDK: failed to determine version: %w", err) - } - } - - // Run diff check if enabled - if cfg.SDK.Diff.Enabled { - breaking, err := checkBreakingChanges(projectDir, cfg.SDK) - if err != nil { - // Non-fatal: warn and continue - fmt.Printf("Warning: diff check failed: %v\n", err) - } else if breaking { - if cfg.SDK.Diff.FailOnBreaking { - return nil, fmt.Errorf("release.RunSDK: breaking API changes detected") - } - fmt.Printf("Warning: breaking API changes detected\n") - } - } - - // Prepare result - output := cfg.SDK.Output - if output == "" { - output = "sdk" - } - - result := &SDKRelease{ - Version: version, - Languages: cfg.SDK.Languages, - Output: output, - } - - if dryRun { - return result, nil - } - - // Generate SDKs - sdkCfg := toSDKConfig(cfg.SDK) - s := sdk.New(projectDir, sdkCfg) - s.SetVersion(version) - - if err := s.Generate(ctx); err != nil { - return nil, fmt.Errorf("release.RunSDK: generation failed: %w", err) - } - - return result, nil -} - -// checkBreakingChanges runs oasdiff to detect breaking changes. -func checkBreakingChanges(projectDir string, cfg *SDKConfig) (bool, error) { - // Get previous tag for comparison - prevTag, err := getPreviousTag(projectDir) - if err != nil { - return false, fmt.Errorf("no previous tag found: %w", err) - } - - // Detect spec path - specPath := cfg.Spec - if specPath == "" { - s := sdk.New(projectDir, nil) - specPath, err = s.DetectSpec() - if err != nil { - return false, err - } - } - - // Run diff - result, err := sdk.Diff(prevTag, specPath) - if err != nil { - return false, err - } - - return result.Breaking, nil -} - -// getPreviousTag gets the most recent tag before HEAD. 
-func getPreviousTag(projectDir string) (string, error) { - // Use git describe to get previous tag - // This is a simplified version - may need refinement - cmd := exec.Command("git", "describe", "--tags", "--abbrev=0", "HEAD^") - cmd.Dir = projectDir - out, err := cmd.Output() - if err != nil { - return "", err - } - return strings.TrimSpace(string(out)), nil -} -``` - -Add import for `os/exec` and `strings`. - -**Step 4: Run test to verify it passes** - -Run: `go test ./pkg/release/... -run TestRunSDK -v` -Expected: PASS - -**Step 5: Commit** - -```bash -git add pkg/release/sdk.go pkg/release/sdk_test.go -git commit -m "feat(release): implement RunSDK for SDK-only releases" -``` - ---- - -## Task 4: Add --target flag to CLI - -**Files:** -- Modify: `cmd/core/cmd/release.go` - -**Step 1: Add target flag and routing** - -In `AddReleaseCommand`, add: - -```go -var target string -releaseCmd.StringFlag("target", "Release target (sdk)", &target) - -// Update the action -releaseCmd.Action(func() error { - if target == "sdk" { - return runReleaseSDK(dryRun, version) - } - return runRelease(dryRun, version, draft, prerelease) -}) -``` - -**Step 2: Verify it compiles** - -Run: `go build ./cmd/core/...` -Expected: FAIL with "runReleaseSDK undefined" - -**Step 3: Commit partial progress** - -```bash -git add cmd/core/cmd/release.go -git commit -m "feat(cli): add --target flag to release command" -``` - ---- - -## Task 5: Implement runReleaseSDK CLI function - -**Files:** -- Modify: `cmd/core/cmd/release.go` - -**Step 1: Add the function** - -```go -// runReleaseSDK executes SDK-only release. 
-func runReleaseSDK(dryRun bool, version string) error { - ctx := context.Background() - - projectDir, err := os.Getwd() - if err != nil { - return fmt.Errorf("failed to get working directory: %w", err) - } - - // Load configuration - cfg, err := release.LoadConfig(projectDir) - if err != nil { - return fmt.Errorf("failed to load config: %w", err) - } - - // Apply CLI overrides - if version != "" { - cfg.SetVersion(version) - } - - // Print header - fmt.Printf("%s Generating SDKs\n", releaseHeaderStyle.Render("SDK Release:")) - if dryRun { - fmt.Printf(" %s\n", releaseDimStyle.Render("(dry-run mode)")) - } - fmt.Println() - - // Run SDK release - result, err := release.RunSDK(ctx, cfg, dryRun) - if err != nil { - fmt.Printf("%s %v\n", releaseErrorStyle.Render("Error:"), err) - return err - } - - // Print summary - fmt.Println() - fmt.Printf("%s SDK generation complete!\n", releaseSuccessStyle.Render("Success:")) - fmt.Printf(" Version: %s\n", releaseValueStyle.Render(result.Version)) - fmt.Printf(" Languages: %v\n", result.Languages) - fmt.Printf(" Output: %s/\n", releaseValueStyle.Render(result.Output)) - - return nil -} -``` - -**Step 2: Verify it compiles and help shows flag** - -Run: `go build -o bin/core ./cmd/core && ./bin/core release --help` -Expected: Shows `--target` flag in help output - -**Step 3: Commit** - -```bash -git add cmd/core/cmd/release.go -git commit -m "feat(cli): implement runReleaseSDK for SDK generation" -``` - ---- - -## Task 6: Add integration tests - -**Files:** -- Modify: `pkg/release/sdk_test.go` - -**Step 1: Add more test cases** - -```go -func TestRunSDK_Good_WithDiffEnabled(t *testing.T) { - cfg := &Config{ - SDK: &SDKConfig{ - Languages: []string{"typescript"}, - Output: "sdk", - Diff: SDKDiffConfig{ - Enabled: true, - FailOnBreaking: false, - }, - }, - } - cfg.projectDir = "/tmp" - cfg.version = "v1.0.0" - - // Dry run should succeed even without git repo - result, err := RunSDK(context.Background(), cfg, true) - if err != nil { 
- t.Fatalf("unexpected error: %v", err) - } - if result.Version != "v1.0.0" { - t.Errorf("expected v1.0.0, got %s", result.Version) - } -} - -func TestRunSDK_Good_DefaultOutput(t *testing.T) { - cfg := &Config{ - SDK: &SDKConfig{ - Languages: []string{"go"}, - // Output not set - should default to "sdk" - }, - } - cfg.projectDir = "/tmp" - cfg.version = "v1.0.0" - - result, err := RunSDK(context.Background(), cfg, true) - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - if result.Output != "sdk" { - t.Errorf("expected default output 'sdk', got %s", result.Output) - } -} - -func TestToSDKConfig_Good_Conversion(t *testing.T) { - relCfg := &SDKConfig{ - Spec: "api.yaml", - Languages: []string{"typescript", "python"}, - Output: "generated", - Package: SDKPackageConfig{ - Name: "my-sdk", - Version: "v2.0.0", - }, - Diff: SDKDiffConfig{ - Enabled: true, - FailOnBreaking: true, - }, - } - - sdkCfg := toSDKConfig(relCfg) - - if sdkCfg.Spec != "api.yaml" { - t.Errorf("expected spec api.yaml, got %s", sdkCfg.Spec) - } - if len(sdkCfg.Languages) != 2 { - t.Errorf("expected 2 languages, got %d", len(sdkCfg.Languages)) - } - if sdkCfg.Package.Name != "my-sdk" { - t.Errorf("expected package name my-sdk, got %s", sdkCfg.Package.Name) - } -} - -func TestToSDKConfig_Good_NilInput(t *testing.T) { - result := toSDKConfig(nil) - if result != nil { - t.Error("expected nil for nil input") - } -} -``` - -**Step 2: Run all tests** - -Run: `go test ./pkg/release/... -v` -Expected: All tests PASS - -**Step 3: Commit** - -```bash -git add pkg/release/sdk_test.go -git commit -m "test(release): add SDK release integration tests" -``` - ---- - -## Task 7: Final verification and TODO update - -**Step 1: Build CLI** - -Run: `go build -o bin/core ./cmd/core` -Expected: Success - -**Step 2: Test help output** - -Run: `./bin/core release --help` -Expected: Shows `--target` flag - -**Step 3: Run all tests** - -Run: `go test ./pkg/release/... ./pkg/sdk/... 
-v` -Expected: All PASS - -**Step 4: Update TODO.md** - -Mark S3.4 `core release --target sdk` as complete in `tasks/TODO.md`. - -**Step 5: Commit** - -```bash -git add tasks/TODO.md -git commit -m "docs: mark S3.4 SDK release integration as complete" -``` diff --git a/tasks/plans/docs-sync-next-steps.md b/tasks/plans/docs-sync-next-steps.md deleted file mode 100644 index d360a05..0000000 --- a/tasks/plans/docs-sync-next-steps.md +++ /dev/null @@ -1,43 +0,0 @@ -# Docs Sync Setup - Next Steps - -After moving repo to `~/Code/host-uk/core`: - -## 1. Add to repos.yaml - -Add this to `/Users/snider/Code/host-uk/repos.yaml` under `repos:`: - -```yaml - # CLI (Go) - core: - type: foundation - description: Core CLI - build, release, deploy for Go/Wails/PHP/containers - docs: true - ci: github-actions -``` - -## 2. Test docs sync - -```bash -cd ~/Code/host-uk -core docs list # Should show "core" with docs -core docs sync --dry-run # Preview what syncs -``` - -## 3. Add CLI section to VitePress (core-php) - -Edit `core-php/docs/.vitepress/config.js`: -- Add `/cli/` to nav -- Add sidebar for CLI commands - -## 4. 
Sync and verify - -```bash -core docs sync --output ../core-php/docs/cli -``` - ---- - -Current state: -- CLI docs written in `docs/cmd/*.md` (12 files) -- `docs/index.md` updated with command table -- All committed to git diff --git a/testdata/cli_clir.go.bak b/testdata/cli_clir.go.bak new file mode 100644 index 0000000..fd4c33b --- /dev/null +++ b/testdata/cli_clir.go.bak @@ -0,0 +1,1339 @@ +package core + +import ( + "flag" + "fmt" + "io" + "os" + "reflect" + "strconv" + "strings" +) + +// Command represents a command that may be run by the user +type clirCommand struct { + name string + commandPath string + shortdescription string + longdescription string + subCommands []*clirCommand + subCommandsMap map[string]*clirCommand + longestSubcommand int + actionCallback CliAction + app *Cli + flags *flag.FlagSet + flagCount int + helpFlag bool + hidden bool + positionalArgsMap map[string]reflect.Value + sliceSeparator map[string]string +} + +// NewCommand creates a new Command. +// Description is optional — if omitted, i18n resolves it from the command path. 
+func newClirCommand(name string, description ...string) *clirCommand { + desc := "" + if len(description) > 0 { + desc = description[0] + } + result := &Command{ + name: name, + shortdescription: desc, + subCommandsMap: make(map[string]*clirCommand), + hidden: false, + positionalArgsMap: make(map[string]reflect.Value), + sliceSeparator: make(map[string]string), + } + + // Init flagset so flags can be added before Run + result.setParentCommandPath("") + + return result +} + +func (c *clirCommand) setParentCommandPath(parentCommandPath string) { + // Set up command path + if parentCommandPath != "" { + c.commandPath += parentCommandPath + " " + } + c.commandPath += c.name + + // Set up flag set + c.flags = flag.NewFlagSet(c.commandPath, flag.ContinueOnError) + c.BoolFlag("help", "Get help on the '"+strings.ToLower(c.commandPath)+"' command.", &c.helpFlag) + + // result.Flags.Usage = result.PrintHelp + +} + +func (c *clirCommand) inheritFlags(inheritFlags *flag.FlagSet) { + // inherit flags + inheritFlags.VisitAll(func(f *flag.Flag) { + if f.Name != "help" { + c.flags.Var(f.Value, f.Name, f.Usage) + } + }) +} + +func (c *clirCommand) setApp(app *Cli) { + c.app = app +} + +// parseFlags parses the given flags +func (c *clirCommand) parseFlags(args []string) error { + // Parse flags + // Suppress flag parse errors to stderr + + c.flags.SetOutput(io.Discard) + + // Credit: https://stackoverflow.com/a/74146375 + var positionalArgs []string + for { + if err := c.flags.Parse(args); err != nil { + return err + } + // Consume all the flags that were parsed as flags. + args = args[len(args)-c.flags.NArg():] + if len(args) == 0 { + break + } + // There's at least one flag remaining and it must be a positional arg since + // we consumed all args that were parsed as flags. Consume just the first + // one, and retry parsing, since subsequent args may be flags. 
+ positionalArgs = append(positionalArgs, args[0]) + args = args[1:] + } + + // Parse just the positional args so that flagset.Args()/flagset.NArgs() + // return the expected value. + // Note: This should never return an error. + err := c.flags.Parse(positionalArgs) + if err != nil { + return err + } + + if len(positionalArgs) > 0 { + return c.parsePositionalArgs(positionalArgs) + } + return nil +} + +// Run - Runs the Command with the given arguments +func (c *clirCommand) run(args []string) error { + + // If we have arguments, process them + if len(args) > 0 { + // Check for subcommand + subcommand := c.subCommandsMap[args[0]] + if subcommand != nil { + return subcommand.run(args[1:]) + } + + // Parse flags + err := c.parseFlags(args) + if err != nil { + if c.app.errorHandler != nil { + return c.app.errorHandler(c.commandPath, err) + } + return E("cli.Run", fmt.Sprintf("see '%s --help' for usage", c.commandPath), err) + } + + // Help takes precedence + if c.helpFlag { + c.PrintHelp() + return nil + } + } + + // Do we have an action? + if c.actionCallback != nil { + return c.actionCallback() + } + + // If we haven't specified a subcommand + // check for an app level default command + if c.app.defaultCommand != nil { + // Prevent recursion! 
+ if c.app.defaultCommand != c { + // only run default command if no args passed + if len(args) == 0 { + return c.app.defaultCommand.run(args) + } + } + } + + // Nothing left we can do + c.PrintHelp() + + return nil +} + +// Action - Define an action from this command +func (c *clirCommand) Action(callback CliAction) *clirCommand { + c.actionCallback = callback + return c +} + +// PrintHelp - Output the help text for this command +func (c *clirCommand) PrintHelp() { + c.app.PrintBanner() + + commandTitle := c.commandPath + if c.shortdescription != "" { + commandTitle += " - " + c.shortdescription + } + // Ignore root command + if c.commandPath != c.name { + fmt.Println(commandTitle) + } + if c.longdescription != "" { + fmt.Println(c.longdescription + "\n") + } + if len(c.subCommands) > 0 { + fmt.Println("Available commands:") + fmt.Println("") + for _, subcommand := range c.subCommands { + if subcommand.isHidden() { + continue + } + spacer := strings.Repeat(" ", 3+c.longestSubcommand-len(subcommand.name)) + isDefault := "" + if subcommand.isDefaultCommand() { + isDefault = "[default]" + } + fmt.Printf(" %s%s%s %s\n", subcommand.name, spacer, subcommand.shortdescription, isDefault) + } + fmt.Println("") + } + if c.flagCount > 0 { + fmt.Println("Flags:") + fmt.Println() + c.flags.SetOutput(os.Stdout) + c.flags.PrintDefaults() + c.flags.SetOutput(os.Stderr) + + } + fmt.Println() +} + +// isDefaultCommand returns true if called on the default command +func (c *clirCommand) isDefaultCommand() bool { + return c.app.defaultCommand == c +} + +// isHidden returns true if the command is a hidden command +func (c *clirCommand) isHidden() bool { + return c.hidden +} + +// Hidden hides the command from the Help system +func (c *clirCommand) Hidden() { + c.hidden = true +} + +// NewChildCommand - Creates a new subcommand +func (c *clirCommand) NewChildCommand(name string, description ...string) *clirCommand { + result := NewCommand(name, description...) 
+ c.AddCommand(result) + return result +} + +// AddCommand - Adds a subcommand +func (c *clirCommand) AddCommand(command *clirCommand) { + command.setApp(c.app) + command.setParentCommandPath(c.commandPath) + name := command.name + c.subCommands = append(c.subCommands, command) + c.subCommandsMap[name] = command + if len(name) > c.longestSubcommand { + c.longestSubcommand = len(name) + } +} + +// NewChildCommandInheritFlags - Creates a new subcommand, inherits flags from command +func (c *clirCommand) NewChildCommandInheritFlags(name string, description ...string) *clirCommand { + result := c.NewChildCommand(name, description...) + result.inheritFlags(c.flags) + return result +} + +func (c *clirCommand) AddFlags(optionStruct any) *clirCommand { + // use reflection to determine if this is a pointer to a struct + // if not, panic + + t := reflect.TypeOf(optionStruct) + + // Check for a pointer to a struct + if t.Kind() != reflect.Ptr { + panic("AddFlags() requires a pointer to a struct") + } + if t.Elem().Kind() != reflect.Struct { + panic("AddFlags() requires a pointer to a struct") + } + + // Iterate through the fields of the struct reading the struct tags + // and adding the flags + v := reflect.ValueOf(optionStruct).Elem() + for i := 0; i < v.NumField(); i++ { + field := v.Field(i) + fieldType := t.Elem().Field(i) + if !fieldType.IsExported() { + continue + } + // If this is an embedded struct, recurse + if fieldType.Type.Kind() == reflect.Struct { + c.AddFlags(field.Addr().Interface()) + continue + } + + tag := t.Elem().Field(i).Tag + name := tag.Get("name") + description := tag.Get("description") + defaultValue := tag.Get("default") + pos := tag.Get("pos") + sep := tag.Get("sep") + c.positionalArgsMap[pos] = field + if sep != "" { + c.sliceSeparator[pos] = sep + } + if name == "" { + name = strings.ToLower(t.Elem().Field(i).Name) + } + switch field.Kind() { + case reflect.Bool: + var defaultValueBool bool + if defaultValue != "" { + var err error + 
defaultValueBool, err = strconv.ParseBool(defaultValue) + if err != nil { + panic("Invalid default value for bool flag") + } + } + field.SetBool(defaultValueBool) + c.BoolFlag(name, description, field.Addr().Interface().(*bool)) + case reflect.String: + if defaultValue != "" { + // set value of field to default value + field.SetString(defaultValue) + } + c.StringFlag(name, description, field.Addr().Interface().(*string)) + case reflect.Int: + if defaultValue != "" { + // set value of field to default value + value, err := strconv.Atoi(defaultValue) + if err != nil { + panic("Invalid default value for int flag") + } + field.SetInt(int64(value)) + } + c.IntFlag(name, description, field.Addr().Interface().(*int)) + case reflect.Int8: + if defaultValue != "" { + // set value of field to default value + value, err := strconv.Atoi(defaultValue) + if err != nil { + panic("Invalid default value for int8 flag") + } + field.SetInt(int64(value)) + } + c.Int8Flag(name, description, field.Addr().Interface().(*int8)) + case reflect.Int16: + if defaultValue != "" { + // set value of field to default value + value, err := strconv.Atoi(defaultValue) + if err != nil { + panic("Invalid default value for int16 flag") + } + field.SetInt(int64(value)) + } + c.Int16Flag(name, description, field.Addr().Interface().(*int16)) + case reflect.Int32: + if defaultValue != "" { + // set value of field to default value + value, err := strconv.Atoi(defaultValue) + if err != nil { + panic("Invalid default value for int32 flag") + } + field.SetInt(int64(value)) + } + c.Int32Flag(name, description, field.Addr().Interface().(*int32)) + case reflect.Int64: + if defaultValue != "" { + // set value of field to default value + value, err := strconv.Atoi(defaultValue) + if err != nil { + panic("Invalid default value for int64 flag") + } + field.SetInt(int64(value)) + } + c.Int64Flag(name, description, field.Addr().Interface().(*int64)) + case reflect.Uint: + if defaultValue != "" { + // set value of field 
to default value + value, err := strconv.Atoi(defaultValue) + if err != nil { + panic("Invalid default value for uint flag") + } + field.SetUint(uint64(value)) + } + c.UintFlag(name, description, field.Addr().Interface().(*uint)) + case reflect.Uint8: + if defaultValue != "" { + // set value of field to default value + value, err := strconv.Atoi(defaultValue) + if err != nil { + panic("Invalid default value for uint8 flag") + } + field.SetUint(uint64(value)) + } + c.Uint8Flag(name, description, field.Addr().Interface().(*uint8)) + case reflect.Uint16: + if defaultValue != "" { + // set value of field to default value + value, err := strconv.Atoi(defaultValue) + if err != nil { + panic("Invalid default value for uint16 flag") + } + field.SetUint(uint64(value)) + } + c.Uint16Flag(name, description, field.Addr().Interface().(*uint16)) + case reflect.Uint32: + if defaultValue != "" { + // set value of field to default value + value, err := strconv.Atoi(defaultValue) + if err != nil { + panic("Invalid default value for uint32 flag") + } + field.SetUint(uint64(value)) + } + c.Uint32Flag(name, description, field.Addr().Interface().(*uint32)) + case reflect.Uint64: + if defaultValue != "" { + // set value of field to default value + value, err := strconv.Atoi(defaultValue) + if err != nil { + panic("Invalid default value for uint64 flag") + } + field.SetUint(uint64(value)) + } + c.UInt64Flag(name, description, field.Addr().Interface().(*uint64)) + case reflect.Float32: + if defaultValue != "" { + // set value of field to default value + value, err := strconv.ParseFloat(defaultValue, 64) + if err != nil { + panic("Invalid default value for float32 flag") + } + field.SetFloat(value) + } + c.Float32Flag(name, description, field.Addr().Interface().(*float32)) + case reflect.Float64: + if defaultValue != "" { + // set value of field to default value + value, err := strconv.ParseFloat(defaultValue, 64) + if err != nil { + panic("Invalid default value for float64 flag") + } + 
field.SetFloat(value) + } + c.Float64Flag(name, description, field.Addr().Interface().(*float64)) + case reflect.Slice: + c.addSliceField(field, defaultValue, sep) + c.addSliceFlags(name, description, field) + default: + if pos != "" { + fmt.Fprintf(os.Stderr, "WARNING: unsupported type for flag: %s %s\n", fieldType.Type.Kind(), name) + } + } + } + + return c +} + +func (c *clirCommand) addSliceFlags(name, description string, field reflect.Value) *clirCommand { + if field.Kind() != reflect.Slice { + panic("addSliceFlags() requires a pointer to a slice") + } + t := reflect.TypeOf(field.Addr().Interface()) + if t.Kind() != reflect.Ptr { + panic("addSliceFlags() requires a pointer to a slice") + } + if t.Elem().Kind() != reflect.Slice { + panic("addSliceFlags() requires a pointer to a slice") + } + switch t.Elem().Elem().Kind() { + case reflect.Bool: + c.BoolsFlag(name, description, field.Addr().Interface().(*[]bool)) + case reflect.String: + c.StringsFlag(name, description, field.Addr().Interface().(*[]string)) + case reflect.Int: + c.IntsFlag(name, description, field.Addr().Interface().(*[]int)) + case reflect.Int8: + c.Int8sFlag(name, description, field.Addr().Interface().(*[]int8)) + case reflect.Int16: + c.Int16sFlag(name, description, field.Addr().Interface().(*[]int16)) + case reflect.Int32: + c.Int32sFlag(name, description, field.Addr().Interface().(*[]int32)) + case reflect.Int64: + c.Int64sFlag(name, description, field.Addr().Interface().(*[]int64)) + case reflect.Uint: + c.UintsFlag(name, description, field.Addr().Interface().(*[]uint)) + case reflect.Uint8: + c.Uint8sFlag(name, description, field.Addr().Interface().(*[]uint8)) + case reflect.Uint16: + c.Uint16sFlag(name, description, field.Addr().Interface().(*[]uint16)) + case reflect.Uint32: + c.Uint32sFlag(name, description, field.Addr().Interface().(*[]uint32)) + case reflect.Uint64: + c.Uint64sFlag(name, description, field.Addr().Interface().(*[]uint64)) + case reflect.Float32: + c.Float32sFlag(name, 
description, field.Addr().Interface().(*[]float32)) + case reflect.Float64: + c.Float64sFlag(name, description, field.Addr().Interface().(*[]float64)) + default: + panic(fmt.Sprintf("addSliceFlags() not supported slice type %s", t.Elem().Elem().Kind().String())) + } + return c +} + +func (c *clirCommand) addSliceField(field reflect.Value, defaultValue, separator string) *clirCommand { + if defaultValue == "" { + return c + } + if field.Kind() != reflect.Slice { + panic("addSliceField() requires a pointer to a slice") + } + t := reflect.TypeOf(field.Addr().Interface()) + if t.Kind() != reflect.Ptr { + panic("addSliceField() requires a pointer to a slice") + } + if t.Elem().Kind() != reflect.Slice { + panic("addSliceField() requires a pointer to a slice") + } + defaultSlice := []string{defaultValue} + if separator != "" { + defaultSlice = strings.Split(defaultValue, separator) + } + switch t.Elem().Elem().Kind() { + case reflect.Bool: + defaultValues := make([]bool, 0, len(defaultSlice)) + for _, value := range defaultSlice { + val, err := strconv.ParseBool(value) + if err != nil { + panic("Invalid default value for bool flag") + } + defaultValues = append(defaultValues, val) + } + field.Set(reflect.ValueOf(defaultValues)) + case reflect.String: + field.Set(reflect.ValueOf(defaultSlice)) + case reflect.Int: + defaultValues := make([]int, 0, len(defaultSlice)) + for _, value := range defaultSlice { + val, err := strconv.Atoi(value) + if err != nil { + panic("Invalid default value for int flag") + } + defaultValues = append(defaultValues, val) + } + field.Set(reflect.ValueOf(defaultValues)) + case reflect.Int8: + defaultValues := make([]int8, 0, len(defaultSlice)) + for _, value := range defaultSlice { + val, err := strconv.Atoi(value) + if err != nil { + panic("Invalid default value for int8 flag") + } + defaultValues = append(defaultValues, int8(val)) + } + field.Set(reflect.ValueOf(defaultValues)) + case reflect.Int16: + defaultValues := make([]int16, 0, 
len(defaultSlice)) + for _, value := range defaultSlice { + val, err := strconv.Atoi(value) + if err != nil { + panic("Invalid default value for int16 flag") + } + defaultValues = append(defaultValues, int16(val)) + } + field.Set(reflect.ValueOf(defaultValues)) + case reflect.Int32: + defaultValues := make([]int32, 0, len(defaultSlice)) + for _, value := range defaultSlice { + val, err := strconv.ParseInt(value, 10, 32) + if err != nil { + panic("Invalid default value for int32 flag") + } + defaultValues = append(defaultValues, int32(val)) + } + field.Set(reflect.ValueOf(defaultValues)) + case reflect.Int64: + defaultValues := make([]int64, 0, len(defaultSlice)) + for _, value := range defaultSlice { + val, err := strconv.ParseInt(value, 10, 64) + if err != nil { + panic("Invalid default value for int64 flag") + } + defaultValues = append(defaultValues, val) + } + field.Set(reflect.ValueOf(defaultValues)) + case reflect.Uint: + defaultValues := make([]uint, 0, len(defaultSlice)) + for _, value := range defaultSlice { + val, err := strconv.Atoi(value) + if err != nil { + panic("Invalid default value for uint flag") + } + defaultValues = append(defaultValues, uint(val)) + } + field.Set(reflect.ValueOf(defaultValues)) + case reflect.Uint8: + defaultValues := make([]uint8, 0, len(defaultSlice)) + for _, value := range defaultSlice { + val, err := strconv.Atoi(value) + if err != nil { + panic("Invalid default value for uint8 flag") + } + defaultValues = append(defaultValues, uint8(val)) + } + field.Set(reflect.ValueOf(defaultValues)) + case reflect.Uint16: + defaultValues := make([]uint16, 0, len(defaultSlice)) + for _, value := range defaultSlice { + val, err := strconv.Atoi(value) + if err != nil { + panic("Invalid default value for uint16 flag") + } + defaultValues = append(defaultValues, uint16(val)) + } + field.Set(reflect.ValueOf(defaultValues)) + case reflect.Uint32: + defaultValues := make([]uint32, 0, len(defaultSlice)) + for _, value := range defaultSlice { + 
val, err := strconv.Atoi(value) + if err != nil { + panic("Invalid default value for uint32 flag") + } + defaultValues = append(defaultValues, uint32(val)) + } + field.Set(reflect.ValueOf(defaultValues)) + case reflect.Uint64: + defaultValues := make([]uint64, 0, len(defaultSlice)) + for _, value := range defaultSlice { + val, err := strconv.Atoi(value) + if err != nil { + panic("Invalid default value for uint64 flag") + } + defaultValues = append(defaultValues, uint64(val)) + } + field.Set(reflect.ValueOf(defaultValues)) + case reflect.Float32: + defaultValues := make([]float32, 0, len(defaultSlice)) + for _, value := range defaultSlice { + val, err := strconv.ParseFloat(value, 32) + if err != nil { + panic("Invalid default value for float32 flag") + } + defaultValues = append(defaultValues, float32(val)) + } + field.Set(reflect.ValueOf(defaultValues)) + case reflect.Float64: + defaultValues := make([]float64, 0, len(defaultSlice)) + for _, value := range defaultSlice { + val, err := strconv.ParseFloat(value, 64) + if err != nil { + panic("Invalid default value for float64 flag") + } + defaultValues = append(defaultValues, float64(val)) + } + field.Set(reflect.ValueOf(defaultValues)) + default: + panic(fmt.Sprintf("addSliceField() not supported slice type %s", t.Elem().Elem().Kind().String())) + } + return c +} + +// BoolFlag - Adds a boolean flag to the command +func (c *clirCommand) BoolFlag(name, description string, variable *bool) *clirCommand { + c.flags.BoolVar(variable, name, *variable, description) + c.flagCount++ + return c +} + +// BoolsFlag - Adds a booleans flag to the command +func (c *clirCommand) BoolsFlag(name, description string, variable *[]bool) *clirCommand { + c.flags.Var(newBoolsValue(*variable, variable), name, description) + c.flagCount++ + return c +} + +// StringFlag - Adds a string flag to the command +func (c *clirCommand) StringFlag(name, description string, variable *string) *clirCommand { + c.flags.StringVar(variable, name, 
*variable, description)
	c.flagCount++
	return c
}

// StringsFlag - Adds a repeatable string flag to the command; each
// occurrence appends to *variable.
func (c *clirCommand) StringsFlag(name, description string, variable *[]string) *clirCommand {
	c.flags.Var(newStringsValue(*variable, variable), name, description)
	c.flagCount++
	return c
}

// IntFlag - Adds an int flag to the command.
func (c *clirCommand) IntFlag(name, description string, variable *int) *clirCommand {
	c.flags.IntVar(variable, name, *variable, description)
	c.flagCount++
	return c
}

// IntsFlag - Adds a repeatable int flag to the command.
func (c *clirCommand) IntsFlag(name, description string, variable *[]int) *clirCommand {
	c.flags.Var(newIntsValue(*variable, variable), name, description)
	c.flagCount++
	return c
}

// Int8Flag - Adds an int8 flag to the command.
func (c *clirCommand) Int8Flag(name, description string, variable *int8) *clirCommand {
	c.flags.Var(newInt8Value(*variable, variable), name, description)
	c.flagCount++
	return c
}

// Int8sFlag - Adds a repeatable int8 flag to the command.
func (c *clirCommand) Int8sFlag(name, description string, variable *[]int8) *clirCommand {
	c.flags.Var(newInt8sValue(*variable, variable), name, description)
	c.flagCount++
	return c
}

// Int16Flag - Adds an int16 flag to the command.
func (c *clirCommand) Int16Flag(name, description string, variable *int16) *clirCommand {
	c.flags.Var(newInt16Value(*variable, variable), name, description)
	c.flagCount++
	return c
}

// Int16sFlag - Adds a repeatable int16 flag to the command.
func (c *clirCommand) Int16sFlag(name, description string, variable *[]int16) *clirCommand {
	c.flags.Var(newInt16sValue(*variable, variable), name, description)
	c.flagCount++
	return c
}

// Int32Flag - Adds an int32 flag to the command.
func (c *clirCommand) Int32Flag(name, description string, variable *int32) *clirCommand {
	c.flags.Var(newInt32Value(*variable, variable), name, description)
	c.flagCount++
	return c
}

// Int32sFlag - Adds a repeatable int32 flag to the command.
func (c *clirCommand) Int32sFlag(name, description string, variable *[]int32) *clirCommand {
	c.flags.Var(newInt32sValue(*variable, variable), name, description)
	c.flagCount++
	return c
}

// Int64Flag - Adds an int64 flag to the command.
func (c *clirCommand) Int64Flag(name, description string, variable *int64) *clirCommand {
	c.flags.Int64Var(variable, name, *variable, description)
	c.flagCount++
	return c
}

// Int64sFlag - Adds a repeatable int64 flag to the command.
func (c *clirCommand) Int64sFlag(name, description string, variable *[]int64) *clirCommand {
	c.flags.Var(newInt64sValue(*variable, variable), name, description)
	c.flagCount++
	return c
}

// UintFlag - Adds a uint flag to the command.
func (c *clirCommand) UintFlag(name, description string, variable *uint) *clirCommand {
	c.flags.UintVar(variable, name, *variable, description)
	c.flagCount++
	return c
}

// UintsFlag - Adds a repeatable uint flag to the command.
func (c *clirCommand) UintsFlag(name, description string, variable *[]uint) *clirCommand {
	c.flags.Var(newUintsValue(*variable, variable), name, description)
	c.flagCount++
	return c
}

// Uint8Flag - Adds a uint8 flag to the command.
func (c *clirCommand) Uint8Flag(name, description string, variable *uint8) *clirCommand {
	c.flags.Var(newUint8Value(*variable, variable), name, description)
	c.flagCount++
	return c
}

// Uint8sFlag - Adds a repeatable uint8 flag to the command.
func (c *clirCommand) Uint8sFlag(name, description string, variable *[]uint8) *clirCommand {
	c.flags.Var(newUint8sValue(*variable, variable), name, description)
	c.flagCount++
	return c
}

// Uint16Flag - Adds a uint16 flag to the command.
func (c *clirCommand) Uint16Flag(name, description string, variable *uint16) *clirCommand {
	c.flags.Var(newUint16Value(*variable, variable), name, description)
	c.flagCount++
	return c
}

// Uint16sFlag - Adds a repeatable uint16 flag to the command.
func (c *clirCommand) Uint16sFlag(name, description string, variable *[]uint16) *clirCommand {
	c.flags.Var(newUint16sValue(*variable, variable), name, description)
	c.flagCount++
	return c
}

// Uint32Flag - Adds a uint32 flag to the command.
func (c *clirCommand) Uint32Flag(name, description string, variable *uint32) *clirCommand {
	c.flags.Var(newUint32Value(*variable, variable), name, description)
	c.flagCount++
	return c
}

// Uint32sFlag - Adds a repeatable uint32 flag to the command.
func (c *clirCommand) Uint32sFlag(name, description string, variable *[]uint32) *clirCommand {
	c.flags.Var(newUint32sValue(*variable, variable), name, description)
	c.flagCount++
	return c
}

// UInt64Flag - Adds a uint64 flag to the command.
// NOTE(review): name casing ("UInt") differs from every sibling ("Uint");
// kept as-is because renaming would break callers.
func (c *clirCommand) UInt64Flag(name, description string, variable *uint64) *clirCommand {
	c.flags.Uint64Var(variable, name, *variable, description)
	c.flagCount++
	return c
}

// Uint64sFlag - Adds a repeatable uint64 flag to the command.
func (c *clirCommand) Uint64sFlag(name, description string, variable *[]uint64) *clirCommand {
	c.flags.Var(newUint64sValue(*variable, variable), name, description)
	c.flagCount++
	return c
}

// Float64Flag - Adds a float64 flag to the command.
func (c *clirCommand) Float64Flag(name, description string, variable *float64) *clirCommand {
	c.flags.Float64Var(variable, name, *variable, description)
	c.flagCount++
	return c
}

// Float32Flag - Adds a float32 flag to the command.
func (c *clirCommand) Float32Flag(name, description string, variable *float32) *clirCommand {
	c.flags.Var(newFloat32Value(*variable, variable), name, description)
	c.flagCount++
	return c
}

// Float32sFlag - Adds a repeatable float32 flag to the command.
func (c *clirCommand) Float32sFlag(name, description string, variable *[]float32) *clirCommand {
	c.flags.Var(newFloat32sValue(*variable, variable), name, description)
	c.flagCount++
	return c
}

// Float64sFlag - Adds a repeatable float64 flag to the command.
func (c *clirCommand)
Float64sFlag(name, description string, variable *[]float64) *clirCommand {
	c.flags.Var(newFloat64sValue(*variable, variable), name, description)
	c.flagCount++
	return c
}

// boolsFlagVar accumulates repeated bool flag values (flag.Value).
type boolsFlagVar []bool

func (f *boolsFlagVar) String() string { return fmt.Sprint([]bool(*f)) }

// Set appends one parsed bool; an empty value (bare -flag form) appends false.
func (f *boolsFlagVar) Set(value string) error {
	if value == "" {
		*f = append(*f, false)
		return nil
	}
	b, err := strconv.ParseBool(value)
	if err != nil {
		return err
	}
	*f = append(*f, b)
	return nil
}

// IsBoolFlag marks this as a boolean flag so it can appear without "=value".
func (f *boolsFlagVar) IsBoolFlag() bool {
	return true
}

func newBoolsValue(val []bool, p *[]bool) *boolsFlagVar {
	*p = val
	return (*boolsFlagVar)(p)
}

// stringsFlagVar accumulates repeated string flag values.
type stringsFlagVar []string

func (f *stringsFlagVar) String() string { return fmt.Sprint([]string(*f)) }

func (f *stringsFlagVar) Set(value string) error {
	*f = append(*f, value)
	return nil
}

func newStringsValue(val []string, p *[]string) *stringsFlagVar {
	*p = val
	return (*stringsFlagVar)(p)
}

// intsFlagVar accumulates repeated int flag values.
type intsFlagVar []int

func (f *intsFlagVar) String() string { return fmt.Sprint([]int(*f)) }

func (f *intsFlagVar) Set(value string) error {
	i, err := strconv.Atoi(value)
	if err != nil {
		return err
	}
	*f = append(*f, i)
	return nil
}

func newIntsValue(val []int, p *[]int) *intsFlagVar {
	*p = val
	return (*intsFlagVar)(p)
}

// int8Value adapts a single int8 to flag.Value.
type int8Value int8

func newInt8Value(val int8, p *int8) *int8Value {
	*p = val
	return (*int8Value)(p)
}

// Set parses with an explicit 8-bit size so out-of-range input is an
// error instead of being silently truncated by the narrowing conversion.
func (f *int8Value) Set(value string) error {
	i, err := strconv.ParseInt(value, 10, 8)
	if err != nil {
		return err
	}
	*f = int8Value(i)
	return nil
}

func (f *int8Value) String() string { return fmt.Sprint(int8(*f)) }

type int8sFlagVar []int8

func (f *int8sFlagVar) String() string { return fmt.Sprint([]int8(*f)) }

func (f *int8sFlagVar) Set(value string) error {
	i, err := strconv.ParseInt(value, 10, 8)
	if err != nil {
		return err
	}
	*f = append(*f, int8(i))
	return nil
}

func newInt8sValue(val []int8, p *[]int8) *int8sFlagVar {
	*p = val
	return (*int8sFlagVar)(p)
}

type int16Value int16

func newInt16Value(val int16, p *int16) *int16Value {
	*p = val
	return (*int16Value)(p)
}

func (f *int16Value) Set(value string) error {
	i, err := strconv.ParseInt(value, 10, 16)
	if err != nil {
		return err
	}
	*f = int16Value(i)
	return nil
}

func (f *int16Value) String() string { return fmt.Sprint(int16(*f)) }

type int16sFlagVar []int16

func (f *int16sFlagVar) String() string { return fmt.Sprint([]int16(*f)) }

func (f *int16sFlagVar) Set(value string) error {
	i, err := strconv.ParseInt(value, 10, 16)
	if err != nil {
		return err
	}
	*f = append(*f, int16(i))
	return nil
}

func newInt16sValue(val []int16, p *[]int16) *int16sFlagVar {
	*p = val
	return (*int16sFlagVar)(p)
}

type int32Value int32

func newInt32Value(val int32, p *int32) *int32Value {
	*p = val
	return (*int32Value)(p)
}

func (f *int32Value) Set(value string) error {
	i, err := strconv.ParseInt(value, 10, 32)
	if err != nil {
		return err
	}
	*f = int32Value(i)
	return nil
}

func (f *int32Value) String() string { return fmt.Sprint(int32(*f)) }

type int32sFlagVar []int32

func (f *int32sFlagVar) String() string { return fmt.Sprint([]int32(*f)) }

func (f *int32sFlagVar) Set(value string) error {
	i, err := strconv.ParseInt(value, 10, 32)
	if err != nil {
		return err
	}
	*f = append(*f, int32(i))
	return nil
}

func newInt32sValue(val []int32, p *[]int32) *int32sFlagVar {
	*p = val
	return (*int32sFlagVar)(p)
}

type int64sFlagVar []int64

func (f *int64sFlagVar) String() string { return fmt.Sprint([]int64(*f)) }

func (f *int64sFlagVar) Set(value string) error {
	i, err := strconv.ParseInt(value, 10, 64)
	if err != nil {
		return err
	}
	*f = append(*f, i)
	return nil
}

func newInt64sValue(val []int64, p *[]int64) *int64sFlagVar {
	*p = val
	return (*int64sFlagVar)(p)
}

type uintsFlagVar []uint

func (f *uintsFlagVar) String() string {
	return fmt.Sprint([]uint(*f))
}

// Set uses ParseUint (bitSize 0 == native uint) so negative input is
// rejected; Atoi would have wrapped it to a huge unsigned value.
func (f *uintsFlagVar) Set(value string) error {
	i, err := strconv.ParseUint(value, 10, 0)
	if err != nil {
		return err
	}
	*f = append(*f, uint(i))
	return nil
}

func newUintsValue(val []uint, p *[]uint) *uintsFlagVar {
	*p = val
	return (*uintsFlagVar)(p)
}

type uint8FlagVar uint8

func newUint8Value(val uint8, p *uint8) *uint8FlagVar {
	*p = val
	return (*uint8FlagVar)(p)
}

func (f *uint8FlagVar) String() string {
	return fmt.Sprint(uint8(*f))
}

func (f *uint8FlagVar) Set(value string) error {
	i, err := strconv.ParseUint(value, 10, 8)
	if err != nil {
		return err
	}
	*f = uint8FlagVar(i)
	return nil
}

type uint8sFlagVar []uint8

func (f *uint8sFlagVar) String() string {
	return fmt.Sprint([]uint8(*f))
}

func (f *uint8sFlagVar) Set(value string) error {
	i, err := strconv.ParseUint(value, 10, 8)
	if err != nil {
		return err
	}
	*f = append(*f, uint8(i))
	return nil
}

func newUint8sValue(val []uint8, p *[]uint8) *uint8sFlagVar {
	*p = val
	return (*uint8sFlagVar)(p)
}

type uint16FlagVar uint16

func newUint16Value(val uint16, p *uint16) *uint16FlagVar {
	*p = val
	return (*uint16FlagVar)(p)
}

func (f *uint16FlagVar) String() string {
	return fmt.Sprint(uint16(*f))
}

func (f *uint16FlagVar) Set(value string) error {
	i, err := strconv.ParseUint(value, 10, 16)
	if err != nil {
		return err
	}
	*f = uint16FlagVar(i)
	return nil
}

type uint16sFlagVar []uint16

func (f *uint16sFlagVar) String() string {
	return fmt.Sprint([]uint16(*f))
}

func (f *uint16sFlagVar) Set(value string) error {
	i, err := strconv.ParseUint(value, 10, 16)
	if err != nil {
		return err
	}
	*f = append(*f, uint16(i))
	return nil
}

func newUint16sValue(val []uint16, p *[]uint16) *uint16sFlagVar {
	*p = val
	return (*uint16sFlagVar)(p)
}

type uint32FlagVar uint32

func newUint32Value(val uint32, p *uint32) *uint32FlagVar {
	*p = val
	return (*uint32FlagVar)(p)
}

func (f *uint32FlagVar) String() string {
	return fmt.Sprint(uint32(*f))
}

func (f *uint32FlagVar) Set(value string) error {
	i, err := strconv.ParseUint(value, 10, 32)
	if err != nil {
		return err
	}
	*f = uint32FlagVar(i)
	return nil
}

type uint32sFlagVar []uint32

func (f *uint32sFlagVar) String() string {
	return fmt.Sprint([]uint32(*f))
}

func (f *uint32sFlagVar) Set(value string) error {
	i, err := strconv.ParseUint(value, 10, 32)
	if err != nil {
		return err
	}
	*f = append(*f, uint32(i))
	return nil
}

func newUint32sValue(val []uint32, p *[]uint32) *uint32sFlagVar {
	*p = val
	return (*uint32sFlagVar)(p)
}

type uint64sFlagVar []uint64

func (f *uint64sFlagVar) String() string { return fmt.Sprint([]uint64(*f)) }

func (f *uint64sFlagVar) Set(value string) error {
	i, err := strconv.ParseUint(value, 10, 64)
	if err != nil {
		return err
	}
	*f = append(*f, i)
	return nil
}

func newUint64sValue(val []uint64, p *[]uint64) *uint64sFlagVar {
	*p = val
	return (*uint64sFlagVar)(p)
}

type float32sFlagVar []float32

func (f *float32sFlagVar) String() string { return fmt.Sprint([]float32(*f)) }

// Set parses at 32-bit precision; parsing at 64 bits and then converting
// double-rounds and misses float32 range errors.
func (f *float32sFlagVar) Set(value string) error {
	i, err := strconv.ParseFloat(value, 32)
	if err != nil {
		return err
	}
	*f = append(*f, float32(i))
	return nil
}

func newFloat32sValue(val []float32, p *[]float32) *float32sFlagVar {
	*p = val
	return (*float32sFlagVar)(p)
}

type float32FlagVar float32

func (f *float32FlagVar) String() string { return fmt.Sprint(float32(*f)) }

func (f *float32FlagVar) Set(value string) error {
	i, err := strconv.ParseFloat(value, 32)
	if err != nil {
		return err
	}
	*f = float32FlagVar(i)
	return nil
}

func newFloat32Value(val float32, p *float32) *float32FlagVar {
	*p = val
	return (*float32FlagVar)(p)
}

type float64sFlagVar []float64

func (f *float64sFlagVar) String() string { return fmt.Sprint([]float64(*f)) }

func (f *float64sFlagVar) Set(value string) error {
	i, err := strconv.ParseFloat(value, 64)
	if
err != nil { + return err + } + *f = append(*f, i) + return nil +} + +func newFloat64sValue(val []float64, p *[]float64) *float64sFlagVar { + *p = val + return (*float64sFlagVar)(p) +} + +// LongDescription - Sets the long description for the command +func (c *clirCommand) LongDescription(longdescription string) *clirCommand { + c.longdescription = longdescription + return c +} + +// OtherArgs - Returns the non-flag arguments passed to the subcommand. NOTE: This should only be called within the context of an action. +func (c *clirCommand) OtherArgs() []string { + return c.flags.Args() +} + +func (c *clirCommand) NewChildCommandFunction(name string, description string, fn any) *clirCommand { + result := c.NewChildCommand(name, description) + // use reflection to determine if this is a function + // if not, panic + t := reflect.TypeOf(fn) + if t.Kind() != reflect.Func { + panic("NewChildCommandFunction '" + name + "' requires a function with the signature 'func(*struct) error'") + } + + // Check the function has 1 input ant it's a struct pointer + fnValue := reflect.ValueOf(fn) + if t.NumIn() != 1 { + panic("NewChildCommandFunction '" + name + "' requires a function with the signature 'func(*struct) error'") + } + // Check the input is a struct pointer + if t.In(0).Kind() != reflect.Ptr { + panic("NewChildCommandFunction '" + name + "' requires a function with the signature 'func(*struct) error'") + } + if t.In(0).Elem().Kind() != reflect.Struct { + panic("NewChildCommandFunction '" + name + "' requires a function with the signature 'func(*struct) error'") + } + // Check only 1 output and it's an error + if t.NumOut() != 1 { + panic("NewChildCommandFunction '" + name + "' requires a function with the signature 'func(*struct) error'") + } + if t.Out(0) != reflect.TypeOf((*error)(nil)).Elem() { + panic("NewChildCommandFunction '" + name + "' requires a function with the signature 'func(*struct) error'") + } + flags := reflect.New(t.In(0).Elem()) + result.Action(func() 
error { + result := fnValue.Call([]reflect.Value{flags})[0].Interface() + if result != nil { + return result.(error) + } + return nil + }) + result.AddFlags(flags.Interface()) + return result +} + +func (c *clirCommand) parsePositionalArgs(args []string) error { + for index, posArg := range args { + // Check the map for a field for this arg + key := strconv.Itoa(index + 1) + field, ok := c.positionalArgsMap[key] + if !ok { + continue + } + fieldType := field.Type() + switch fieldType.Kind() { + case reflect.Bool: + // set value of field to true + field.SetBool(true) + case reflect.String: + field.SetString(posArg) + case reflect.Int64, reflect.Int32, reflect.Int16, reflect.Int8, reflect.Int: + value, err := strconv.ParseInt(posArg, 10, 64) + if err != nil { + return err + } + field.SetInt(value) + case reflect.Uint64, reflect.Uint32, reflect.Uint16, reflect.Uint8, reflect.Uint: + value, err := strconv.ParseUint(posArg, 10, 64) + if err != nil { + return err + } + field.SetUint(value) + case reflect.Float64, reflect.Float32: + value, err := strconv.ParseFloat(posArg, 64) + if err != nil { + return err + } + field.SetFloat(value) + case reflect.Slice: + c.addSliceField(field, posArg, c.sliceSeparator[key]) + default: + return E("cli.parsePositionalArgs", "unsupported type for positional argument: "+fieldType.Name(), nil) + } + } + return nil +} diff --git a/testdata/scantest/sample.go b/testdata/scantest/sample.go new file mode 100644 index 0000000..3d58547 --- /dev/null +++ b/testdata/scantest/sample.go @@ -0,0 +1,7 @@ +package scantest + +import "dappco.re/go/core" + +func example() { + _, _ = core.GetAsset("mygroup", "myfile.txt") +} diff --git a/pkg/framework/core/testdata/test.txt b/testdata/test.txt similarity index 100% rename from pkg/framework/core/testdata/test.txt rename to testdata/test.txt diff --git a/utils.go b/utils.go new file mode 100644 index 0000000..038e32e --- /dev/null +++ b/utils.go @@ -0,0 +1,159 @@ +// SPDX-License-Identifier: EUPL-1.2 + +// 
Utility functions for the Core framework. +// Built on core string.go primitives. + +package core + +import ( + "fmt" + "io" + "os" +) + +// Print writes a formatted line to a writer, defaulting to os.Stdout. +// +// core.Print(nil, "hello %s", "world") // → stdout +// core.Print(w, "port: %d", 8080) // → w +func Print(w io.Writer, format string, args ...any) { + if w == nil { + w = os.Stdout + } + fmt.Fprintf(w, format+"\n", args...) +} + +// JoinPath joins string segments into a path with "/" separator. +// +// core.JoinPath("deploy", "to", "homelab") // → "deploy/to/homelab" +func JoinPath(segments ...string) string { + return Join("/", segments...) +} + +// IsFlag returns true if the argument starts with a dash. +// +// core.IsFlag("--verbose") // true +// core.IsFlag("-v") // true +// core.IsFlag("deploy") // false +func IsFlag(arg string) bool { + return HasPrefix(arg, "-") +} + +// Arg extracts a value from variadic args at the given index. +// Type-checks and delegates to the appropriate typed extractor. +// Returns Result — OK is false if index is out of bounds. +// +// r := core.Arg(0, args...) +// if r.OK { path = r.Value.(string) } +func Arg(index int, args ...any) Result { + if index >= len(args) { + return Result{} + } + v := args[index] + switch v.(type) { + case string: + return Result{ArgString(index, args...), true} + case int: + return Result{ArgInt(index, args...), true} + case bool: + return Result{ArgBool(index, args...), true} + default: + return Result{v, true} + } +} + +// ArgString extracts a string at the given index. +// +// name := core.ArgString(0, args...) +func ArgString(index int, args ...any) string { + if index >= len(args) { + return "" + } + s, ok := args[index].(string) + if !ok { + return "" + } + return s +} + +// ArgInt extracts an int at the given index. +// +// port := core.ArgInt(1, args...) 
+func ArgInt(index int, args ...any) int { + if index >= len(args) { + return 0 + } + i, ok := args[index].(int) + if !ok { + return 0 + } + return i +} + +// ArgBool extracts a bool at the given index. +// +// debug := core.ArgBool(2, args...) +func ArgBool(index int, args ...any) bool { + if index >= len(args) { + return false + } + b, ok := args[index].(bool) + if !ok { + return false + } + return b +} + +// FilterArgs removes empty strings and Go test runner flags from an argument list. +// +// clean := core.FilterArgs(os.Args[1:]) +func FilterArgs(args []string) []string { + var clean []string + for _, a := range args { + if a == "" || HasPrefix(a, "-test.") { + continue + } + clean = append(clean, a) + } + return clean +} + +// ParseFlag parses a single flag argument into key, value, and validity. +// Single dash (-) requires exactly 1 character (letter, emoji, unicode). +// Double dash (--) requires 2+ characters. +// +// "-v" → "v", "", true +// "-🔥" → "🔥", "", true +// "--verbose" → "verbose", "", true +// "--port=8080" → "port", "8080", true +// "-verbose" → "", "", false (single dash, 2+ chars) +// "--v" → "", "", false (double dash, 1 char) +// "hello" → "", "", false (not a flag) +func ParseFlag(arg string) (key, value string, valid bool) { + if HasPrefix(arg, "--") { + rest := TrimPrefix(arg, "--") + parts := SplitN(rest, "=", 2) + name := parts[0] + if RuneCount(name) < 2 { + return "", "", false + } + if len(parts) == 2 { + return name, parts[1], true + } + return name, "", true + } + + if HasPrefix(arg, "-") { + rest := TrimPrefix(arg, "-") + parts := SplitN(rest, "=", 2) + name := parts[0] + if RuneCount(name) != 1 { + return "", "", false + } + if len(parts) == 2 { + return name, parts[1], true + } + return name, "", true + } + + return "", "", false +} diff --git a/utils_test.go b/utils_test.go new file mode 100644 index 0000000..9b6be9d --- /dev/null +++ b/utils_test.go @@ -0,0 +1,217 @@ +package core_test + +import ( + "errors" + "testing" + + 
. "dappco.re/go/core" + "github.com/stretchr/testify/assert" +) + +// --- FilterArgs --- + +func TestFilterArgs_Good(t *testing.T) { + args := []string{"deploy", "", "to", "-test.v", "homelab", "-test.paniconexit0"} + clean := FilterArgs(args) + assert.Equal(t, []string{"deploy", "to", "homelab"}, clean) +} + +func TestFilterArgs_Empty_Good(t *testing.T) { + clean := FilterArgs(nil) + assert.Nil(t, clean) +} + +// --- ParseFlag --- + +func TestParseFlag_ShortValid_Good(t *testing.T) { + // Single letter + k, v, ok := ParseFlag("-v") + assert.True(t, ok) + assert.Equal(t, "v", k) + assert.Equal(t, "", v) + + // Single emoji + k, v, ok = ParseFlag("-🔥") + assert.True(t, ok) + assert.Equal(t, "🔥", k) + assert.Equal(t, "", v) + + // Short with value + k, v, ok = ParseFlag("-p=8080") + assert.True(t, ok) + assert.Equal(t, "p", k) + assert.Equal(t, "8080", v) +} + +func TestParseFlag_ShortInvalid_Bad(t *testing.T) { + // Multiple chars with single dash — invalid + _, _, ok := ParseFlag("-verbose") + assert.False(t, ok) + + _, _, ok = ParseFlag("-port") + assert.False(t, ok) +} + +func TestParseFlag_LongValid_Good(t *testing.T) { + k, v, ok := ParseFlag("--verbose") + assert.True(t, ok) + assert.Equal(t, "verbose", k) + assert.Equal(t, "", v) + + k, v, ok = ParseFlag("--port=8080") + assert.True(t, ok) + assert.Equal(t, "port", k) + assert.Equal(t, "8080", v) +} + +func TestParseFlag_LongInvalid_Bad(t *testing.T) { + // Single char with double dash — invalid + _, _, ok := ParseFlag("--v") + assert.False(t, ok) +} + +func TestParseFlag_NotAFlag_Bad(t *testing.T) { + _, _, ok := ParseFlag("hello") + assert.False(t, ok) + + _, _, ok = ParseFlag("") + assert.False(t, ok) +} + +// --- IsFlag --- + +func TestIsFlag_Good(t *testing.T) { + assert.True(t, IsFlag("-v")) + assert.True(t, IsFlag("--verbose")) + assert.True(t, IsFlag("-")) +} + +func TestIsFlag_Bad(t *testing.T) { + assert.False(t, IsFlag("hello")) + assert.False(t, IsFlag("")) +} + +// --- Arg --- + +func 
TestArg_String_Good(t *testing.T) { + r := Arg(0, "hello", 42, true) + assert.True(t, r.OK) + assert.Equal(t, "hello", r.Value) +} + +func TestArg_Int_Good(t *testing.T) { + r := Arg(1, "hello", 42, true) + assert.True(t, r.OK) + assert.Equal(t, 42, r.Value) +} + +func TestArg_Bool_Good(t *testing.T) { + r := Arg(2, "hello", 42, true) + assert.True(t, r.OK) + assert.Equal(t, true, r.Value) +} + +func TestArg_UnsupportedType_Good(t *testing.T) { + r := Arg(0, 3.14) + assert.True(t, r.OK) + assert.Equal(t, 3.14, r.Value) +} + +func TestArg_OutOfBounds_Bad(t *testing.T) { + r := Arg(5, "only", "two") + assert.False(t, r.OK) + assert.Nil(t, r.Value) +} + +func TestArg_NoArgs_Bad(t *testing.T) { + r := Arg(0) + assert.False(t, r.OK) + assert.Nil(t, r.Value) +} + +func TestArg_ErrorDetection_Good(t *testing.T) { + err := errors.New("fail") + r := Arg(0, err) + assert.True(t, r.OK) + assert.Equal(t, err, r.Value) +} + +// --- ArgString --- + +func TestArgString_Good(t *testing.T) { + assert.Equal(t, "hello", ArgString(0, "hello", 42)) + assert.Equal(t, "world", ArgString(1, "hello", "world")) +} + +func TestArgString_WrongType_Bad(t *testing.T) { + assert.Equal(t, "", ArgString(0, 42)) +} + +func TestArgString_OutOfBounds_Bad(t *testing.T) { + assert.Equal(t, "", ArgString(3, "only")) +} + +// --- ArgInt --- + +func TestArgInt_Good(t *testing.T) { + assert.Equal(t, 42, ArgInt(0, 42, "hello")) + assert.Equal(t, 99, ArgInt(1, 0, 99)) +} + +func TestArgInt_WrongType_Bad(t *testing.T) { + assert.Equal(t, 0, ArgInt(0, "not an int")) +} + +func TestArgInt_OutOfBounds_Bad(t *testing.T) { + assert.Equal(t, 0, ArgInt(5, 1, 2)) +} + +// --- ArgBool --- + +func TestArgBool_Good(t *testing.T) { + assert.Equal(t, true, ArgBool(0, true, "hello")) + assert.Equal(t, false, ArgBool(1, true, false)) +} + +func TestArgBool_WrongType_Bad(t *testing.T) { + assert.Equal(t, false, ArgBool(0, "not a bool")) +} + +func TestArgBool_OutOfBounds_Bad(t *testing.T) { + assert.Equal(t, false, 
ArgBool(5, true)) +} + +// --- Result.Result() --- + +func TestResult_Result_SingleArg_Good(t *testing.T) { + r := Result{}.Result("value") + assert.True(t, r.OK) + assert.Equal(t, "value", r.Value) +} + +func TestResult_Result_NilError_Good(t *testing.T) { + r := Result{}.Result("value", nil) + assert.True(t, r.OK) + assert.Equal(t, "value", r.Value) +} + +func TestResult_Result_WithError_Bad(t *testing.T) { + err := errors.New("fail") + r := Result{}.Result("value", err) + assert.False(t, r.OK) + assert.Equal(t, err, r.Value) +} + +func TestResult_Result_ZeroArgs_Good(t *testing.T) { + r := Result{"hello", true} + got := r.Result() + assert.Equal(t, "hello", got.Value) + assert.True(t, got.OK) +} + +func TestResult_Result_ZeroArgs_Empty_Good(t *testing.T) { + r := Result{} + got := r.Result() + assert.Nil(t, got.Value) + assert.False(t, got.OK) +}