chore(quality-gates): add Justfile, .pre-commit-config.yaml, and check scripts

- Justfile: standard recipes (dev, test, lint, fmt, typecheck, validate-skills,
  build, up, down, restart, logs, health, test-live, setup, gen-token,
  check-contract, clean) using uv/ruff for Python
- .pre-commit-config.yaml: four hooks (skills-validate, docker-security,
  no-baked-env, ensure-ignore-files)
- scripts/: copy check-docker-security.sh, check-no-baked-env.sh,
  check-outdated-deps.sh, ensure-ignore-files.sh, lint-plugin.sh from
  claude-homelab canonical source; all chmod +x

Closes claude-homelab-5sy
This commit is contained in:
Jacob Magar 2026-03-31 17:25:56 -04:00
parent 80d1fc02c6
commit 583fed3cd1
7 changed files with 1334 additions and 0 deletions

23
.pre-commit-config.yaml Normal file
View file

@ -0,0 +1,23 @@
# Pre-commit configuration — every hook is a local `language: system` hook
# that shells out to project scripts, so no remote hook repos (and no
# network access) are needed to run them.
repos:
  - repo: local
    hooks:
      # Every skills/*/SKILL.md must validate (delegates to the Justfile).
      - id: skills-validate
        name: Validate skills
        entry: just validate-skills
        language: system
        pass_filenames: false

      # Dockerfile conventions: multi-stage, non-root user, HEALTHCHECK.
      - id: docker-security
        name: Docker security check
        entry: bash scripts/check-docker-security.sh
        language: system
        pass_filenames: false

      # No secrets baked into Docker artifacts; env comes from .env only.
      - id: no-baked-env
        name: No baked env vars
        entry: bash scripts/check-no-baked-env.sh
        language: system
        pass_filenames: false

      # .gitignore / .dockerignore carry all required patterns (--check mode).
      - id: ensure-ignore-files
        name: Ensure ignore files
        entry: bash scripts/ensure-ignore-files.sh --check
        language: system
        pass_filenames: false

105
Justfile Normal file
View file

@ -0,0 +1,105 @@
# Unraid MCP Server — Justfile
# Run `just` to list available recipes.
#
# NOTE: unlike make, just passes `$` through to the shell unescaped, so shell
# variables/substitutions are written `$var` / `$(cmd)`, never `$$var`.

# List available recipes (default goal)
default:
    @just --list

# ── Development ───────────────────────────────────────────────────────────────

# Start development server (hot-reload via watchfiles)
dev:
    uv run python -m unraid_mcp

# Run tests
test:
    uv run pytest tests/ -v

# Run linter (ruff)
lint:
    uv run ruff check .

# Format code (ruff)
fmt:
    uv run ruff format .

# Type-check (pyright preferred, mypy as fallback)
typecheck:
    uv run pyright unraid_mcp/ || uv run mypy unraid_mcp/

# Validate skills: every skills/*/SKILL.md must exist AND be non-empty
# (`-s` checks both; `-f` alone would accept an empty file).
validate-skills:
    @echo "=== Validating skills ==="
    @for f in skills/*/SKILL.md; do \
        if [ -s "$f" ]; then \
            echo "$f"; \
        else \
            echo " ✗ $f missing or empty"; \
            exit 1; \
        fi; \
    done
    @echo "Skills validation passed"

# Build Docker image
build:
    docker build -t unraid-mcp .

# ── Docker Compose ────────────────────────────────────────────────────────────

# Start containers
up:
    docker compose up -d

# Stop containers
down:
    docker compose down

# Restart containers
restart:
    docker compose restart

# Tail container logs
logs:
    docker compose logs -f

# Check /health endpoint. Port comes from .env; default applied separately
# because `grep | cut || echo` never fires the fallback (cut exits 0 even
# when grep matched nothing).
health:
    @PORT="$(grep -E '^UNRAID_MCP_PORT=' .env 2>/dev/null | cut -d= -f2)"; \
    PORT="${PORT:-6970}"; \
    curl -sf "http://localhost:$PORT/health" | python3 -m json.tool || echo "Health check failed"

# ── Live Testing ──────────────────────────────────────────────────────────────

# Run live integration tests against a running server
test-live:
    uv run pytest tests/ -v -m live

# ── Setup ─────────────────────────────────────────────────────────────────────

# Create .env from .env.example if missing (0600 — it will hold credentials)
setup:
    @if [ ! -f .env ]; then \
        cp .env.example .env && chmod 600 .env; \
        echo "Created .env from .env.example — fill in your credentials"; \
    else \
        echo ".env already exists"; \
    fi

# Generate a secure bearer token for UNRAID_MCP_TOKEN
gen-token:
    @python3 -c "import secrets; print(secrets.token_urlsafe(32))"

# ── Quality Gates ─────────────────────────────────────────────────────────────

# Run all contract/security checks (same set as the pre-commit hooks)
check-contract:
    bash scripts/check-docker-security.sh
    bash scripts/check-no-baked-env.sh
    bash scripts/ensure-ignore-files.sh --check

# ── Cleanup ───────────────────────────────────────────────────────────────────

# Remove build artifacts, caches, and compiled files (leaves .venv alone)
clean:
    rm -rf dist/ build/ .pytest_cache/ .ruff_cache/ .mypy_cache/ htmlcov/ .coverage coverage.xml
    find . -type d -name __pycache__ -not -path './.venv/*' -exec rm -rf {} + 2>/dev/null || true
    find . -name '*.pyc' -not -path './.venv/*' -delete 2>/dev/null || true
    @echo "Cleaned build artifacts"

145
scripts/check-docker-security.sh Executable file
View file

@ -0,0 +1,145 @@
#!/usr/bin/env bash
# check-docker-security.sh — Verify Dockerfile follows plugin security conventions
# Run standalone: bash scripts/check-docker-security.sh [path/to/Dockerfile]
# Run in pre-commit: add as a hook (see .pre-commit-config.yaml example in plugin-setup-guide)
#
# Checks:
#   1. Multi-stage build (separate builder + runtime stages)
#   2. Non-root user (USER 1000:1000 or ${PUID}:${PGID})
#   3. No sensitive ENV directives baked into the image
#   4. HEALTHCHECK present
#
# Exit status: 0 when no required check failed, 1 otherwise (warnings are OK).

set -euo pipefail

PASS=0
FAIL=0
WARN=0

# Result helpers: label in $1, optional detail in $2 (joined with " — ").
pass() { echo " ✓ PASS: $1"; PASS=$((PASS + 1)); }
fail() { echo " ✗ FAIL: $1 — $2"; FAIL=$((FAIL + 1)); }
warn() { echo " ⚠ WARN: $1 — $2"; WARN=$((WARN + 1)); }

# Find Dockerfile (first arg, default ./Dockerfile)
DOCKERFILE="${1:-Dockerfile}"
if [[ ! -f "$DOCKERFILE" ]]; then
  echo "Error: $DOCKERFILE not found" >&2
  exit 1
fi

echo "=== Docker Security Check: $DOCKERFILE ==="

# ── 1. Multi-stage build ─────────────────────────────────────────────────────
# grep -c prints "0" and exits 1 on no match; `|| true` keeps set -e happy.
FROM_COUNT=$(grep -cE '^FROM\s' "$DOCKERFILE" || true)
if [[ "$FROM_COUNT" -ge 2 ]]; then
  pass "Multi-stage build ($FROM_COUNT stages)"
else
  fail "Multi-stage build" "Found $FROM_COUNT FROM directive(s) — need at least 2 (builder + runtime)"
fi

# Named stages (FROM ... AS builder / runtime) are recommended.
if grep -qE '^FROM\s.+\sAS\s+builder' "$DOCKERFILE"; then
  pass "Named builder stage"
else
  warn "Named builder stage" "No 'FROM ... AS builder' found — recommend naming stages"
fi
if grep -qE '^FROM\s.+\sAS\s+runtime' "$DOCKERFILE"; then
  pass "Named runtime stage"
else
  warn "Named runtime stage" "No 'FROM ... AS runtime' found — recommend naming stages"
fi

# ── 2. Non-root user ─────────────────────────────────────────────────────────
if grep -qE '^USER\s' "$DOCKERFILE"; then
  # The last USER directive wins at runtime.
  USER_LINE=$(grep -E '^USER\s' "$DOCKERFILE" | tail -1)
  USER_VALUE=$(echo "$USER_LINE" | sed 's/^USER\s*//')
  # Accept 1000, 1000:1000, or ${PUID}-style variable references.
  if echo "$USER_VALUE" | grep -qE '^\$?\{?PUID|1000:1000|1000$'; then
    pass "Non-root user ($USER_VALUE)"
  else
    warn "Non-root user" "USER is '$USER_VALUE' — expected 1000:1000 or \${PUID}:\${PGID}"
  fi
else
  # Compose may set the user instead of the Dockerfile.
  if [[ -f "docker-compose.yaml" ]] && grep -qE '^\s+user:' docker-compose.yaml; then
    warn "Non-root user" "No USER in Dockerfile but docker-compose.yaml sets user: — acceptable if always run via compose"
  else
    fail "Non-root user" "No USER directive found — container runs as root"
  fi
fi

# No USER root after the runtime stage begins.
RUNTIME_START=$(grep -nE '^FROM\s.+\sAS\s+runtime' "$DOCKERFILE" | head -1 | cut -d: -f1 || true)
if [[ -n "$RUNTIME_START" ]]; then
  if tail -n +"$RUNTIME_START" "$DOCKERFILE" | grep -qE '^USER\s+root'; then
    fail "No root in runtime" "USER root found after runtime stage — never run as root in production"
  else
    pass "No root in runtime stage"
  fi
fi

# ── 3. No sensitive ENV baked in ──────────────────────────────────────────────
SENSITIVE_PATTERNS='(API_KEY|TOKEN|SECRET|PASSWORD|CREDENTIAL|PRIVATE_KEY|AUTH)'
BAKED_ENVS=$(grep -nE "^ENV\s+.*${SENSITIVE_PATTERNS}" "$DOCKERFILE" || true)
if [[ -n "$BAKED_ENVS" ]]; then
  fail "No baked secrets" "Sensitive ENV directives found in Dockerfile:"
  echo "$BAKED_ENVS" | while IFS= read -r line; do
    echo " $line"
  done
else
  pass "No baked secrets in ENV directives"
fi

# ARG defaults survive in `docker history`, so sensitive defaults warrant a warning.
BAKED_ARGS=$(grep -nE "^ARG\s+.*${SENSITIVE_PATTERNS}.*=" "$DOCKERFILE" || true)
if [[ -n "$BAKED_ARGS" ]]; then
  warn "No baked ARG secrets" "ARG with sensitive defaults found (may leak via docker history):"
  echo "$BAKED_ARGS" | while IFS= read -r line; do
    echo " $line"
  done
else
  pass "No baked secrets in ARG defaults"
fi

# ── 4. HEALTHCHECK ────────────────────────────────────────────────────────────
if grep -qE '^HEALTHCHECK\s' "$DOCKERFILE"; then
  pass "HEALTHCHECK directive present"
  if grep -qE '/health' "$DOCKERFILE"; then
    pass "HEALTHCHECK uses /health endpoint"
  else
    warn "HEALTHCHECK endpoint" "HEALTHCHECK doesn't reference /health — ensure it matches your health endpoint"
  fi
else
  warn "HEALTHCHECK" "No HEALTHCHECK in Dockerfile — relying on docker-compose healthcheck only"
fi

# ── 5. Dependency layer caching ───────────────────────────────────────────────
# Manifest files should be COPY'd before source so dependency layers cache.
COPY_LINES=$(grep -nE '^COPY\s' "$DOCKERFILE" || true)
FIRST_MANIFEST_COPY=""
FIRST_SOURCE_COPY=""
while IFS= read -r line; do
  linenum=$(echo "$line" | cut -d: -f1)
  content=$(echo "$line" | cut -d: -f2-)
  if echo "$content" | grep -qE '(pyproject\.toml|package.*\.json|Cargo\.(toml|lock)|go\.(mod|sum)|uv\.lock)'; then
    [[ -z "$FIRST_MANIFEST_COPY" ]] && FIRST_MANIFEST_COPY="$linenum"
  elif echo "$content" | grep -qE '\.\s+\.|src/|lib/'; then
    [[ -z "$FIRST_SOURCE_COPY" ]] && FIRST_SOURCE_COPY="$linenum"
  fi
done <<< "$COPY_LINES"
if [[ -n "$FIRST_MANIFEST_COPY" && -n "$FIRST_SOURCE_COPY" ]]; then
  if [[ "$FIRST_MANIFEST_COPY" -lt "$FIRST_SOURCE_COPY" ]]; then
    pass "Dependency manifest copied before source (layer caching)"
  else
    warn "Layer caching" "Source copied before dependency manifest — swap order for better Docker layer caching"
  fi
fi

# ── Summary ───────────────────────────────────────────────────────────────────
# Plain if/else: under `set -e`, a `[[ ... ]] && ... && exit 0` one-liner
# aborts the script before printing the FAILED message when FAIL > 0.
echo
echo "Results: $PASS passed, $FAIL failed, $WARN warnings"
if [[ "$FAIL" -eq 0 ]]; then
  echo "DOCKER SECURITY CHECK PASSED"
  exit 0
fi
echo "DOCKER SECURITY CHECK FAILED"
exit 1

138
scripts/check-no-baked-env.sh Executable file
View file

@ -0,0 +1,138 @@
#!/usr/bin/env bash
# check-no-baked-env.sh — Verify env vars aren't baked into Docker artifacts
# Run standalone: bash scripts/check-no-baked-env.sh [project-dir]
# Run in pre-commit: add as a hook (see .pre-commit-config.yaml example in plugin-setup-guide)
#
# Checks:
#   1. docker-compose.yaml has no `environment:` block (all config via env_file only)
#   2. Dockerfile has no ENV with real/sensitive values
#   3. No hardcoded URLs, tokens, or credentials in Dockerfile or docker-compose.yaml
#
# Exit status: 0 when no required check failed, 1 otherwise (warnings are OK).

set -euo pipefail

PROJECT_DIR="${1:-.}"

PASS=0
FAIL=0
WARN=0

# Result helpers: label in $1, optional detail in $2 (joined with " — ").
pass() { echo " ✓ PASS: $1"; PASS=$((PASS + 1)); }
fail() { echo " ✗ FAIL: $1 — $2"; FAIL=$((FAIL + 1)); }
warn() { echo " ⚠ WARN: $1 — $2"; WARN=$((WARN + 1)); }

echo "=== No Baked Env Vars Check: $PROJECT_DIR ==="

# ── 1. docker-compose.yaml — no environment: block ───────────────────────────
COMPOSE_FILE="$PROJECT_DIR/docker-compose.yaml"
if [[ -f "$COMPOSE_FILE" ]]; then
  # Any `environment:` key under a service means config is baked into compose.
  if grep -qE '^\s+environment:' "$COMPOSE_FILE"; then
    fail "No environment: block in docker-compose.yaml" \
      "Found 'environment:' block — all env vars must come from env_file: .env only"
    echo " Offending lines:"
    grep -nE '^\s+environment:|^\s+-\s+\w+=' "$COMPOSE_FILE" | head -10 | while IFS= read -r line; do
      echo " $line"
    done
    echo
    echo " Fix: Remove the environment: block entirely."
    echo " Add all variables to .env and .env.example instead."
    echo " docker-compose.yaml should only use 'env_file: .env'"
  else
    pass "No environment: block in docker-compose.yaml"
  fi

  # Without env_file: the service receives no credentials at all.
  if grep -qE '^\s+env_file:' "$COMPOSE_FILE"; then
    pass "env_file: directive present"
  else
    fail "env_file: directive" "No env_file: found — services won't receive credentials"
  fi

  # `- KEY=VALUE` entries whose value is a literal (not a `$`-variable
  # reference) and not a plain boolean are likely baked config.
  HARDCODED=$(grep -nE '^\s+-\s+\w+=[^$]' "$COMPOSE_FILE" | grep -vE '=(true|false)$' || true)
  if [[ -n "$HARDCODED" ]]; then
    # Filter out known safe patterns
    SUSPICIOUS=$(echo "$HARDCODED" | grep -vE '(build:|image:|container_name:|restart:|test:|interval:|timeout:|retries:|start_period:|memory:|cpus:|name:)' || true)
    if [[ -n "$SUSPICIOUS" ]]; then
      warn "Hardcoded values in compose" "Found potentially hardcoded values:"
      echo "$SUSPICIOUS" | head -5 | while IFS= read -r line; do
        echo " $line"
      done
    fi
  fi
else
  warn "docker-compose.yaml" "File not found at $COMPOSE_FILE — skipping compose checks"
fi

# ── 2. Dockerfile — no sensitive ENV values ───────────────────────────────────
DOCKERFILE="$PROJECT_DIR/Dockerfile"
if [[ -f "$DOCKERFILE" ]]; then
  # Variable-name fragments that should never appear in an ENV assignment.
  SENSITIVE_RE='(API_KEY|TOKEN|SECRET|PASSWORD|CREDENTIAL|PRIVATE_KEY|AUTH_TOKEN|BEARER)'
  SENSITIVE_ENVS=$(grep -nE "^ENV\s+\S*${SENSITIVE_RE}\S*\s*=" "$DOCKERFILE" || true)
  if [[ -n "$SENSITIVE_ENVS" ]]; then
    fail "No sensitive ENV in Dockerfile" "Found ENV directives with sensitive variable names:"
    echo "$SENSITIVE_ENVS" | while IFS= read -r line; do
      echo " $line"
    done
  else
    pass "No sensitive ENV in Dockerfile"
  fi

  # Hardcoded URLs may embed credentials or environment-specific endpoints.
  URL_ENVS=$(grep -nE '^ENV\s+\S+\s*=\s*https?://' "$DOCKERFILE" || true)
  if [[ -n "$URL_ENVS" ]]; then
    warn "Hardcoded URLs in ENV" "Found ENV with hardcoded URLs (may contain credentials):"
    echo "$URL_ENVS" | while IFS= read -r line; do
      echo " $line"
    done
  else
    pass "No hardcoded URLs in ENV"
  fi

  # Copying .env into the image bakes credentials into a layer permanently.
  if grep -qE '^COPY\s+.*\.env\s' "$DOCKERFILE"; then
    fail "No .env in image" "Dockerfile copies .env into the image — credentials will be baked in"
  else
    pass "No .env copied into image"
  fi

  # .dockerignore must keep .env out of the build context entirely.
  DOCKERIGNORE="$PROJECT_DIR/.dockerignore"
  if [[ -f "$DOCKERIGNORE" ]]; then
    if grep -qE '^\s*\.env\s*$' "$DOCKERIGNORE"; then
      pass ".dockerignore excludes .env"
    else
      fail ".dockerignore" ".env not excluded — secrets may leak into build context"
    fi
  else
    warn ".dockerignore" "File not found — create one that excludes .env"
  fi
else
  warn "Dockerfile" "File not found at $DOCKERFILE — skipping Dockerfile checks"
fi

# ── 3. entrypoint.sh — no hardcoded credentials ──────────────────────────────
ENTRYPOINT="$PROJECT_DIR/entrypoint.sh"
if [[ -f "$ENTRYPOINT" ]]; then
  # Match `password = "literal"` style assignments whose value is not a
  # `$`-variable reference. NOTE: GNU grep -E has no \x27 escape (that is a
  # PCRE/-P feature), so the apostrophe must be spelled literally.
  CRED_PATTERNS="(password|secret|token|api.key)\\s*=\\s*[\"'][^\$]"
  HARDCODED_CREDS=$(grep -inE "$CRED_PATTERNS" "$ENTRYPOINT" || true)
  if [[ -n "$HARDCODED_CREDS" ]]; then
    fail "No hardcoded creds in entrypoint.sh" "Found suspicious hardcoded values:"
    echo "$HARDCODED_CREDS" | while IFS= read -r line; do
      echo " $line"
    done
  else
    pass "No hardcoded credentials in entrypoint.sh"
  fi
else
  # entrypoint.sh is optional — nothing to check.
  true
fi

# ── Summary ───────────────────────────────────────────────────────────────────
# Plain if/else: under `set -e`, a `[[ ... ]] && ... && exit 0` one-liner
# aborts the script before printing the FAILED message when FAIL > 0.
echo
echo "Results: $PASS passed, $FAIL failed, $WARN warnings"
if [[ "$FAIL" -eq 0 ]]; then
  echo "NO BAKED ENV CHECK PASSED"
  exit 0
fi
echo "NO BAKED ENV CHECK FAILED"
exit 1

181
scripts/check-outdated-deps.sh Executable file
View file

@ -0,0 +1,181 @@
#!/usr/bin/env bash
# check-outdated-deps.sh — Report outdated dependencies for Python/TypeScript/Rust projects
# Run standalone: bash scripts/check-outdated-deps.sh [project-dir]
#
# Auto-detects language from manifest files and reports outdated packages.
# Exit code: 0 = all current, 1 = outdated found, 2 = tool error
#
# Not recommended for pre-commit (requires network, slow). Run periodically or in CI.

set -euo pipefail

PROJECT_DIR="${1:-.}"
FOUND_OUTDATED=0   # set to 1 when anything stale or vulnerable is found
CHECKED=0          # number of recognized manifests processed

echo "=== Outdated Dependencies Check: $PROJECT_DIR ==="
echo

# ── Python (uv) ──────────────────────────────────────────────────────────────
if [[ -f "$PROJECT_DIR/pyproject.toml" ]]; then
  CHECKED=$((CHECKED + 1))
  echo "── Python (uv) ──"
  if command -v uv &>/dev/null; then
    # Lock freshness: `uv lock --check` fails when pyproject.toml has drifted.
    if [[ -f "$PROJECT_DIR/uv.lock" ]]; then
      if (cd "$PROJECT_DIR" && uv lock --check 2>/dev/null); then
        echo " ✓ uv.lock is up to date"
      else
        echo " ⚠ uv.lock is out of sync with pyproject.toml — run 'uv lock'"
        FOUND_OUTDATED=1
      fi
    fi
    # Show outdated packages
    echo " Checking for outdated packages..."
    OUTDATED=$(cd "$PROJECT_DIR" && uv pip list --outdated 2>/dev/null || true)
    if [[ -n "$OUTDATED" && "$OUTDATED" != *"No outdated packages"* ]]; then
      LINE_COUNT=$(echo "$OUTDATED" | wc -l)
      if [[ "$LINE_COUNT" -gt 2 ]]; then # more than the two header lines
        echo "$OUTDATED" | head -20
        FOUND_OUTDATED=1
      else
        echo " ✓ All Python packages are current"
      fi
    else
      echo " ✓ All Python packages are current"
    fi
    # Exact pins (==) go stale silently; flag them.
    # NOTE: \d is not valid GNU ERE (it matches a literal 'd') — use [0-9].
    echo " Checking pyproject.toml dependency pins..."
    PINNED=$(grep -E '^\s*"[^"]+==[0-9]' "$PROJECT_DIR/pyproject.toml" 2>/dev/null || true)
    if [[ -n "$PINNED" ]]; then
      echo " ⚠ Found exact-pinned dependencies (consider using >= or ~=):"
      echo "$PINNED" | head -10 | while IFS= read -r line; do
        echo " $line"
      done
    fi
  else
    echo " ⚠ uv not found — install with: curl -LsSf https://astral.sh/uv/install.sh | sh"
  fi
  echo
fi

# ── TypeScript / JavaScript (npm) ────────────────────────────────────────────
if [[ -f "$PROJECT_DIR/package.json" ]]; then
  CHECKED=$((CHECKED + 1))
  echo "── TypeScript / JavaScript ──"
  if command -v npm &>/dev/null; then
    echo " Checking for outdated packages..."
    # npm outdated exits non-zero when anything is stale; keep its JSON output.
    OUTDATED=$(cd "$PROJECT_DIR" && npm outdated --json 2>/dev/null || true)
    if [[ -n "$OUTDATED" && "$OUTDATED" != "{}" ]]; then
      # Pretty-print the JSON as a table, marking major-version jumps.
      echo "$OUTDATED" | python3 -c "
import json, sys
try:
    data = json.load(sys.stdin)
    if data:
        print(f' Found {len(data)} outdated package(s):')
        print(f' {\"Package\":<30} {\"Current\":<15} {\"Wanted\":<15} {\"Latest\":<15}')
        print(f' {\"─\"*30} {\"─\"*15} {\"─\"*15} {\"─\"*15}')
        for pkg, info in sorted(data.items()):
            current = info.get('current', '?')
            wanted = info.get('wanted', '?')
            latest = info.get('latest', '?')
            marker = ' ← MAJOR' if current.split('.')[0] != latest.split('.')[0] else ''
            print(f' {pkg:<30} {current:<15} {wanted:<15} {latest:<15}{marker}')
except (json.JSONDecodeError, KeyError):
    print(' ⚠ Could not parse npm outdated output')
" 2>/dev/null || echo " ⚠ Could not parse npm outdated output"
      FOUND_OUTDATED=1
    else
      echo " ✓ All npm packages are current"
    fi
    # Known vulnerabilities via npm audit (counts everything above 'info').
    echo " Checking for known vulnerabilities..."
    AUDIT=$(cd "$PROJECT_DIR" && npm audit --json 2>/dev/null || true)
    VULN_COUNT=$(echo "$AUDIT" | python3 -c "
import json, sys
try:
    data = json.load(sys.stdin)
    total = data.get('metadata', {}).get('vulnerabilities', {})
    count = sum(v for k, v in total.items() if k != 'info')
    print(count)
except Exception:
    print(0)
" 2>/dev/null || echo "0")
    if [[ "$VULN_COUNT" -gt 0 ]]; then
      echo " ⚠ Found $VULN_COUNT known vulnerabilities — run 'npm audit' for details"
    else
      echo " ✓ No known vulnerabilities"
    fi
  else
    echo " ⚠ npm not found"
  fi
  echo
fi

# ── Rust (cargo) ──────────────────────────────────────────────────────────────
if [[ -f "$PROJECT_DIR/Cargo.toml" ]]; then
  CHECKED=$((CHECKED + 1))
  echo "── Rust (cargo) ──"
  if command -v cargo &>/dev/null; then
    # cargo-outdated is a third-party subcommand; probe before relying on it.
    if cargo outdated --version &>/dev/null; then
      echo " Checking for outdated crates..."
      OUTDATED=$(cd "$PROJECT_DIR" && cargo outdated --root-deps-only 2>/dev/null || true)
      if echo "$OUTDATED" | grep -qE '^\w'; then
        echo "$OUTDATED" | head -20
        FOUND_OUTDATED=1
      else
        echo " ✓ All Rust crates are current"
      fi
    else
      echo " ⚠ cargo-outdated not installed — install with: cargo install cargo-outdated"
      echo " Falling back to Cargo.lock age check..."
      if [[ -f "$PROJECT_DIR/Cargo.lock" ]]; then
        # NOTE(review): `stat -c %Y` is GNU coreutils; on macOS/BSD it would
        # need `stat -f %m` — confirm target platforms.
        LOCK_AGE_DAYS=$(( ($(date +%s) - $(stat -c %Y "$PROJECT_DIR/Cargo.lock")) / 86400 ))
        if [[ "$LOCK_AGE_DAYS" -gt 30 ]]; then
          echo " ⚠ Cargo.lock is $LOCK_AGE_DAYS days old — consider running 'cargo update'"
        else
          echo " ✓ Cargo.lock updated within last 30 days ($LOCK_AGE_DAYS days ago)"
        fi
      fi
    fi
    # cargo-audit (third-party) for known vulnerabilities.
    if cargo audit --version &>/dev/null; then
      echo " Checking for known vulnerabilities..."
      if (cd "$PROJECT_DIR" && cargo audit --quiet 2>/dev/null); then
        echo " ✓ No known vulnerabilities"
      else
        echo " ⚠ Vulnerabilities found — run 'cargo audit' for details"
        FOUND_OUTDATED=1
      fi
    else
      echo " ⚠ cargo-audit not installed — install with: cargo install cargo-audit"
    fi
  else
    echo " ⚠ cargo not found"
  fi
  echo
fi

# ── Summary ───────────────────────────────────────────────────────────────────
if [[ "$CHECKED" -eq 0 ]]; then
  echo "No recognized project manifests found (pyproject.toml, package.json, Cargo.toml)"
  exit 2
fi
echo "=== Summary ==="
if [[ "$FOUND_OUTDATED" -eq 0 ]]; then
  echo "All dependencies are current across $CHECKED project(s)."
  exit 0
else
  echo "Outdated dependencies found. Review above and update as needed."
  exit 1
fi

271
scripts/ensure-ignore-files.sh Executable file
View file

@ -0,0 +1,271 @@
#!/usr/bin/env bash
# ensure-ignore-files.sh — Ensure .gitignore and .dockerignore have all required patterns
#
# Modes:
#   (default)  Append missing patterns to the files (SessionStart hook)
#   --check    Report missing patterns and exit non-zero if any are missing (pre-commit/CI)
#
# Usage:
#   bash scripts/ensure-ignore-files.sh [--check] [project-dir]
#
# As a plugin hook:
#   "command": "${CLAUDE_PLUGIN_ROOT}/hooks/scripts/ensure-ignore-files.sh"

set -euo pipefail

CHECK_MODE=false
if [[ "${1:-}" == "--check" ]]; then
  CHECK_MODE=true
  shift
fi
# Project dir: explicit arg, else plugin root when run as a hook, else cwd.
PROJECT_DIR="${1:-${CLAUDE_PLUGIN_ROOT:-.}}"

PASS=0
FAIL=0
WARN=0

# pass/warn only print in --check mode (the default mode is a quiet fixer);
# fail always prints. Label in $1, detail in $2 (joined with " — ").
pass() { PASS=$((PASS + 1)); if $CHECK_MODE; then echo " ✓ PASS: $1"; fi; }
fail() { FAIL=$((FAIL + 1)); echo " ✗ FAIL: $1 — $2"; }
warn() { WARN=$((WARN + 1)); if $CHECK_MODE; then echo " ⚠ WARN: $1 — $2"; fi; }

# Ensure `pattern` exists as an exact line in `file`.
# --check mode: report it missing; default mode: append it.
ensure_pattern() {
  local file="$1"
  local pattern="$2"
  local label="$3"
  if grep -qxF "$pattern" "$file" 2>/dev/null; then
    pass "$label: '$pattern'"
  elif $CHECK_MODE; then
    fail "$label: '$pattern'" "missing"
  else
    echo "$pattern" >> "$file"
    pass "$label: '$pattern' (added)"
  fi
}

# ═══════════════════════════════════════════════════════════════════════════════
# .gitignore — full required pattern list from plugin-setup-guide
# ═══════════════════════════════════════════════════════════════════════════════
GITIGNORE="$PROJECT_DIR/.gitignore"
if $CHECK_MODE; then echo "=== Ignore Files Check: $PROJECT_DIR ==="; echo "── .gitignore ──"; fi

if [[ ! -f "$GITIGNORE" ]] && $CHECK_MODE; then
  fail ".gitignore" "File not found — every plugin repo must have a .gitignore"
else
  touch "$GITIGNORE"
  # ── Secrets ──
  REQUIRED_GIT=(
    ".env"
    ".env.*"
    "!.env.example"
  )
  # ── Runtime / hook artifacts ──
  REQUIRED_GIT+=(
    "backups/*"
    "!backups/.gitkeep"
    "logs/*"
    "!logs/.gitkeep"
    "*.log"
  )
  # ── Claude Code / AI tooling ──
  REQUIRED_GIT+=(
    ".claude/settings.local.json"
    ".claude/worktrees/"
    ".omc/"
    ".lavra/"
    ".beads/"
    ".serena/"
    ".worktrees"
    ".full-review/"
    ".full-review-archive-*"
  )
  # ── IDE / editor ──
  REQUIRED_GIT+=(
    ".vscode/"
    ".cursor/"
    ".windsurf/"
    ".1code/"
  )
  # ── Caches ──
  REQUIRED_GIT+=(
    ".cache/"
  )
  # ── Documentation artifacts ──
  REQUIRED_GIT+=(
    "docs/plans/"
    "docs/sessions/"
    "docs/reports/"
    "docs/research/"
    "docs/superpowers/"
  )
  for pattern in "${REQUIRED_GIT[@]}"; do
    ensure_pattern "$GITIGNORE" "$pattern" ".gitignore"
  done

  # ── Language-specific (check only, don't auto-add — user must uncomment) ──
  if $CHECK_MODE; then
    if [[ -f "$PROJECT_DIR/pyproject.toml" ]]; then
      echo " Detected: Python project"
      for p in ".venv/" "__pycache__/" "*.py[oc]" "*.egg-info/" "dist/" "build/"; do
        if grep -qxF "$p" "$GITIGNORE" 2>/dev/null; then
          pass ".gitignore (Python): '$p'"
        else
          warn ".gitignore (Python)" "'$p' not found — uncomment Python section"
        fi
      done
    fi
    if [[ -f "$PROJECT_DIR/package.json" ]]; then
      echo " Detected: TypeScript/JavaScript project"
      for p in "node_modules/" "dist/" "build/"; do
        if grep -qxF "$p" "$GITIGNORE" 2>/dev/null; then
          pass ".gitignore (TypeScript): '$p'"
        else
          warn ".gitignore (TypeScript)" "'$p' not found — uncomment TS section"
        fi
      done
    fi
    if [[ -f "$PROJECT_DIR/Cargo.toml" ]]; then
      echo " Detected: Rust project"
      for p in "target/"; do
        if grep -qxF "$p" "$GITIGNORE" 2>/dev/null; then
          pass ".gitignore (Rust): '$p'"
        else
          warn ".gitignore (Rust)" "'$p' not found — uncomment Rust section"
        fi
      done
    fi
    # Negation ordering: '!.env.example' only works after the '.env.*' rule.
    if git -C "$PROJECT_DIR" check-ignore .env.example > /dev/null 2>&1; then
      fail ".gitignore" ".env.example is being ignored — '!.env.example' must come after '.env.*'"
    else
      pass ".gitignore: .env.example is tracked (not ignored)"
    fi
  fi
fi

# ═══════════════════════════════════════════════════════════════════════════════
# .dockerignore — full required pattern list from plugin-setup-guide
# ═══════════════════════════════════════════════════════════════════════════════
DOCKERIGNORE="$PROJECT_DIR/.dockerignore"
# Only relevant when the project builds an image.
if [[ ! -f "$PROJECT_DIR/Dockerfile" ]]; then
  if $CHECK_MODE; then echo; echo "── .dockerignore ──"; echo " No Dockerfile found — skipping"; fi
else
  if $CHECK_MODE; then echo; echo "── .dockerignore ──"; fi
  if [[ ! -f "$DOCKERIGNORE" ]] && $CHECK_MODE; then
    fail ".dockerignore" "File not found — required when Dockerfile exists"
  else
    touch "$DOCKERIGNORE"
    # ── Version control ──
    REQUIRED_DOCKER=(
      ".git"
      ".github"
    )
    # ── Secrets ──
    REQUIRED_DOCKER+=(
      ".env"
      ".env.*"
      "!.env.example"
    )
    # ── Claude Code / AI tooling ──
    REQUIRED_DOCKER+=(
      ".claude"
      ".claude-plugin"
      ".codex-plugin"
      ".omc"
      ".lavra"
      ".beads"
      ".serena"
      ".worktrees"
      ".full-review"
      ".full-review-archive-*"
    )
    # ── IDE / editor ──
    REQUIRED_DOCKER+=(
      ".vscode"
      ".cursor"
      ".windsurf"
      ".1code"
    )
    # ── Docs, tests, scripts — not needed at runtime ──
    REQUIRED_DOCKER+=(
      "docs"
      "tests"
      "scripts"
      "*.md"
      "!README.md"
    )
    # ── Runtime artifacts ──
    REQUIRED_DOCKER+=(
      "logs"
      "backups"
      "*.log"
      ".cache"
    )
    for pattern in "${REQUIRED_DOCKER[@]}"; do
      ensure_pattern "$DOCKERIGNORE" "$pattern" ".dockerignore"
    done

    # ── Language-specific (check only) ──
    if $CHECK_MODE; then
      if [[ -f "$PROJECT_DIR/pyproject.toml" ]]; then
        for p in ".venv" "__pycache__/" "*.py[oc]" "*.egg-info" "dist/"; do
          if grep -qxF "$p" "$DOCKERIGNORE" 2>/dev/null; then
            pass ".dockerignore (Python): '$p'"
          else
            warn ".dockerignore (Python)" "'$p' not found — uncomment Python section"
          fi
        done
      fi
      if [[ -f "$PROJECT_DIR/package.json" ]]; then
        for p in "node_modules/" "dist/" "coverage/"; do
          if grep -qxF "$p" "$DOCKERIGNORE" 2>/dev/null; then
            pass ".dockerignore (TypeScript): '$p'"
          else
            warn ".dockerignore (TypeScript)" "'$p' not found — uncomment TS section"
          fi
        done
      fi
      if [[ -f "$PROJECT_DIR/Cargo.toml" ]]; then
        for p in "target/"; do
          if grep -qxF "$p" "$DOCKERIGNORE" 2>/dev/null; then
            pass ".dockerignore (Rust): '$p'"
          else
            warn ".dockerignore (Rust)" "'$p' not found — uncomment Rust section"
          fi
        done
      fi
    fi
  fi
fi

# ═══════════════════════════════════════════════════════════════════════════════
# Summary (check mode only; default mode fixes silently and exits 0)
# ═══════════════════════════════════════════════════════════════════════════════
if $CHECK_MODE; then
  echo
  echo "Results: $PASS passed, $FAIL failed, $WARN warnings"
  # Plain if/else: under `set -e`, a `[[ ... ]] && ... && exit 0` one-liner
  # aborts the script before printing the FAILED message when FAIL > 0.
  if [[ "$FAIL" -eq 0 ]]; then
    echo "IGNORE FILES CHECK PASSED"
    exit 0
  fi
  echo "IGNORE FILES CHECK FAILED"
  exit 1
fi

471
scripts/lint-plugin.sh Executable file
View file

@ -0,0 +1,471 @@
#!/usr/bin/env bash
# lint-plugin.sh — Comprehensive plugin linter for MCP server plugin repos
# Validates against conventions in docs/plugin-setup-guide.md
#
# Usage: bash scripts/lint-plugin.sh [project-dir]
# project-dir defaults to current directory
#
# Exit codes:
# 0 — all required checks passed (warnings are OK)
# 1 — one or more required checks failed
# Fail fast: undefined variables and pipeline failures are bugs in a linter.
set -euo pipefail
# Project root to lint; defaults to the current directory, normalized to an
# absolute path so every later check can join paths unambiguously.
PROJECT_DIR="${1:-.}"
PROJECT_DIR="$(cd "$PROJECT_DIR" && pwd)"
# Running tallies, printed in the final summary.
PASS=0
FAIL=0
WARN=0
# Result reporters. pass takes one message; fail/warn take a check name ($1)
# and a detail message ($2), joined with " — " — the previous "$1$2" glued
# them together (e.g. " ✗ FAIL: .mcp.jsonFile not found").
pass() { echo " ✓ PASS: $1"; PASS=$((PASS + 1)); }
fail() { echo " ✗ FAIL: $1 — $2"; FAIL=$((FAIL + 1)); }
warn() { echo " ⚠ WARN: $1 — $2"; WARN=$((WARN + 1)); }
echo "=== Plugin Lint: $PROJECT_DIR ==="
echo
# ── 1. Manifest files exist ──────────────────────────────────────────────────
# All four install-surface manifests must exist at their fixed paths.
echo "── 1. Manifests exist ──"
manifests=(
  ".claude-plugin/plugin.json"
  ".codex-plugin/plugin.json"
  ".mcp.json"
  ".app.json"
)
for m in "${manifests[@]}"; do
  if [[ -f "$PROJECT_DIR/$m" ]]; then
    pass "$m exists"
  else
    fail "$m" "File not found"
  fi
done
echo
# ── 2. plugin.json required fields ───────────────────────────────────────────
# The Claude plugin manifest must declare every required top-level field.
echo "── 2. Manifest fields (.claude-plugin/plugin.json) ──"
CLAUDE_PLUGIN="$PROJECT_DIR/.claude-plugin/plugin.json"
if [[ -f "$CLAUDE_PLUGIN" ]]; then
for field in name version description author repository license keywords userConfig; do
# NOTE(review): `jq -e ".$field"` exits non-zero for null/false values as
# well as for missing keys — acceptable for these string/object fields,
# but a present-but-null field is reported as missing.
if jq -e ".$field" "$CLAUDE_PLUGIN" >/dev/null 2>&1; then
pass "plugin.json has '$field'"
else
fail "plugin.json field '$field'" "Missing required field"
fi
done
else
warn "plugin.json fields" "Skipped — .claude-plugin/plugin.json not found"
fi
echo
# ── 3. userConfig field validation ────────────────────────────────────────────
# Every userConfig entry must carry all four attributes:
# type, title, description, sensitive.
echo "── 3. userConfig fields ──"
if [[ -f "$CLAUDE_PLUGIN" ]] && jq -e '.userConfig' "$CLAUDE_PLUGIN" >/dev/null 2>&1; then
  USER_CONFIG_KEYS=$(jq -r '.userConfig | keys[]' "$CLAUDE_PLUGIN" 2>/dev/null || true)
  if [[ -z "$USER_CONFIG_KEYS" ]]; then
    fail "userConfig" "No userConfig entries found"
  else
    while IFS= read -r key; do
      MISSING=""
      for attr in type title description sensitive; do
        # Test key PRESENCE with has(): `jq -e '.sensitive'` exits non-zero
        # when the value is false or null, so a valid `"sensitive": false`
        # was falsely reported as missing. --arg also keeps arbitrary key
        # names from being interpolated into the jq filter text.
        if ! jq -e --arg k "$key" --arg a "$attr" '.userConfig[$k] | has($a)' "$CLAUDE_PLUGIN" >/dev/null 2>&1; then
          MISSING="${MISSING:+$MISSING, }$attr"
        fi
      done
      if [[ -z "$MISSING" ]]; then
        pass "userConfig.$key has all required attributes"
      else
        fail "userConfig.$key" "Missing: $MISSING"
      fi
    done <<< "$USER_CONFIG_KEYS"
  fi
else
  warn "userConfig fields" "Skipped — no userConfig in plugin.json"
fi
echo
echo
# ── 4. Codex manifest — interface.displayName ─────────────────────────────────
echo "── 4. Codex manifest ──"
CODEX_PLUGIN="$PROJECT_DIR/.codex-plugin/plugin.json"
if [[ -f "$CODEX_PLUGIN" ]]; then
if jq -e '.interface' "$CODEX_PLUGIN" >/dev/null 2>&1; then
pass ".codex-plugin/plugin.json has 'interface' object"
if jq -e '.interface.displayName' "$CODEX_PLUGIN" >/dev/null 2>&1; then
pass "interface.displayName present"
else
fail "interface.displayName" "Missing in .codex-plugin/plugin.json"
fi
else
fail "Codex interface" ".codex-plugin/plugin.json missing 'interface' object"
fi
else
warn "Codex manifest" "Skipped — .codex-plugin/plugin.json not found"
fi
echo
# ── 5. Version sync ──────────────────────────────────────────────────────────
echo "── 5. Version sync ──"
PLUGIN_VERSION=""
if [[ -f "$CLAUDE_PLUGIN" ]]; then
PLUGIN_VERSION=$(jq -r '.version // empty' "$CLAUDE_PLUGIN" 2>/dev/null || true)
fi
if [[ -n "$PLUGIN_VERSION" ]]; then
VERSION_CHECKED=false
# pyproject.toml
if [[ -f "$PROJECT_DIR/pyproject.toml" ]]; then
PY_VERSION=$(grep -E '^\s*version\s*=' "$PROJECT_DIR/pyproject.toml" | head -1 | sed 's/.*=\s*"\(.*\)".*/\1/' || true)
if [[ -n "$PY_VERSION" ]]; then
VERSION_CHECKED=true
if [[ "$PY_VERSION" == "$PLUGIN_VERSION" ]]; then
pass "pyproject.toml version ($PY_VERSION) matches plugin.json ($PLUGIN_VERSION)"
else
fail "Version sync" "pyproject.toml=$PY_VERSION vs plugin.json=$PLUGIN_VERSION"
fi
fi
fi
# package.json
if [[ -f "$PROJECT_DIR/package.json" ]]; then
PKG_VERSION=$(jq -r '.version // empty' "$PROJECT_DIR/package.json" 2>/dev/null || true)
if [[ -n "$PKG_VERSION" ]]; then
VERSION_CHECKED=true
if [[ "$PKG_VERSION" == "$PLUGIN_VERSION" ]]; then
pass "package.json version ($PKG_VERSION) matches plugin.json ($PLUGIN_VERSION)"
else
fail "Version sync" "package.json=$PKG_VERSION vs plugin.json=$PLUGIN_VERSION"
fi
fi
fi
# Cargo.toml
if [[ -f "$PROJECT_DIR/Cargo.toml" ]]; then
CARGO_VERSION=$(grep -E '^\s*version\s*=' "$PROJECT_DIR/Cargo.toml" | head -1 | sed 's/.*=\s*"\(.*\)".*/\1/' || true)
if [[ -n "$CARGO_VERSION" ]]; then
VERSION_CHECKED=true
if [[ "$CARGO_VERSION" == "$PLUGIN_VERSION" ]]; then
pass "Cargo.toml version ($CARGO_VERSION) matches plugin.json ($PLUGIN_VERSION)"
else
fail "Version sync" "Cargo.toml=$CARGO_VERSION vs plugin.json=$PLUGIN_VERSION"
fi
fi
fi
if [[ "$VERSION_CHECKED" == "false" ]]; then
warn "Version sync" "No language manifest found (pyproject.toml, package.json, Cargo.toml)"
fi
else
warn "Version sync" "Skipped — no version in plugin.json"
fi
echo
# ── 6. Env naming — no generic vars ──────────────────────────────────────────
echo "── 6. Env naming (no generic vars) ──"
# Generic env var patterns that should be prefixed with the service name
GENERIC_PATTERNS='^\s*(MCP_BEARER_TOKEN|API_KEY|PORT|HOST|TOKEN|SECRET|PASSWORD|AUTH_TOKEN|BEARER_TOKEN|MCP_TOKEN|MCP_PORT|MCP_HOST|DATABASE_URL|DB_URL|DB_HOST|DB_PORT|DB_NAME|DB_USER|DB_PASSWORD)\s*='
GENERIC_FOUND=false
for check_file in \
"$PROJECT_DIR/.env.example" \
"$PROJECT_DIR/docker-compose.yaml"; do
if [[ -f "$check_file" ]]; then
MATCHES=$(grep -nE "$GENERIC_PATTERNS" "$check_file" 2>/dev/null || true)
if [[ -n "$MATCHES" ]]; then
GENERIC_FOUND=true
BASENAME=$(basename "$check_file")
fail "Generic env var in $BASENAME" "All vars must be prefixed with service name"
echo "$MATCHES" | head -5 | while IFS= read -r line; do
echo " $line"
done
fi
fi
done
# Scan source code directories for generic env var usage
for src_dir in "$PROJECT_DIR"/*/; do
dir_name=$(basename "$src_dir")
# Skip non-source directories
case "$dir_name" in
.git|.cache|node_modules|__pycache__|target|.venv|venv|logs|backups|assets|docs|hooks|skills|commands|agents|scripts|tests|.claude-plugin|.codex-plugin|.github) continue ;;
esac
if [[ -d "$src_dir" ]]; then
SRC_MATCHES=$(grep -rnE '(os\.getenv|os\.environ|env::var|process\.env)\s*\(?\s*["\x27]?(MCP_BEARER_TOKEN|API_KEY|PORT|HOST|TOKEN|SECRET|PASSWORD|AUTH_TOKEN|BEARER_TOKEN|MCP_TOKEN)["\x27]?' "$src_dir" 2>/dev/null || true)
if [[ -n "$SRC_MATCHES" ]]; then
GENERIC_FOUND=true
fail "Generic env var in source ($dir_name/)" "All vars must be prefixed with service name"
echo "$SRC_MATCHES" | head -5 | while IFS= read -r line; do
echo " $line"
done
fi
fi
done
if [[ "$GENERIC_FOUND" == "false" ]]; then
pass "No generic env vars found"
fi
echo
# ── 7. Tool pair — domain tool + help tool ────────────────────────────────────
# Convention: a plugin named "foo-mcp" registers a domain tool "foo" and a
# companion help tool "foo_help".
echo "── 7. Tool pair (domain + help) ──"
# Derive expected tool names from plugin name
PLUGIN_NAME=""
if [[ -f "$CLAUDE_PLUGIN" ]]; then
PLUGIN_NAME=$(jq -r '.name // empty' "$CLAUDE_PLUGIN" 2>/dev/null || true)
fi
if [[ -n "$PLUGIN_NAME" ]]; then
# Convert plugin name (e.g. "gotify-mcp") to tool name (e.g. "gotify")
# Strip -mcp suffix, replace hyphens with underscores
TOOL_BASE=$(echo "$PLUGIN_NAME" | sed 's/-mcp$//' | tr '-' '_')
HELP_TOOL="${TOOL_BASE}_help"
# Search source code for tool registration patterns
TOOL_FOUND=false
HELP_FOUND=false
# Look in all source files (Python, TypeScript, Rust)
# Accepts `def name`, a quoted "name"/'name', or `name = "name"`.
# NOTE(review): the bare quoted-string alternative is loose — any string
# literal equal to the tool name counts as a registration.
TOOL_PATTERN="(def ${TOOL_BASE}|\"${TOOL_BASE}\"|'${TOOL_BASE}'|name\s*=\s*\"${TOOL_BASE}\")"
HELP_PATTERN="(def ${HELP_TOOL}|\"${HELP_TOOL}\"|'${HELP_TOOL}'|name\s*=\s*\"${HELP_TOOL}\")"
if grep -rqE "$TOOL_PATTERN" "$PROJECT_DIR" \
--include="*.py" --include="*.ts" --include="*.js" --include="*.rs" --include="*.mjs" \
2>/dev/null; then
TOOL_FOUND=true
fi
if grep -rqE "$HELP_PATTERN" "$PROJECT_DIR" \
--include="*.py" --include="*.ts" --include="*.js" --include="*.rs" --include="*.mjs" \
2>/dev/null; then
HELP_FOUND=true
fi
if [[ "$TOOL_FOUND" == "true" ]]; then
pass "Domain tool '$TOOL_BASE' found in source"
else
fail "Domain tool" "Expected tool '$TOOL_BASE' not found in source code"
fi
if [[ "$HELP_FOUND" == "true" ]]; then
pass "Help tool '$HELP_TOOL' found in source"
else
fail "Help tool" "Expected tool '$HELP_TOOL' not found in source code"
fi
else
warn "Tool pair" "Skipped — could not determine plugin name"
fi
echo
# ── 8. Required files exist ───────────────────────────────────────────────────
# Conventions require each repo to ship these top-level files.
# (-e accepts regular files and symlinks alike.)
echo "── 8. Required files ──"
required_files=(
  CLAUDE.md AGENTS.md GEMINI.md README.md CHANGELOG.md LICENSE
  .gitignore .env.example Justfile entrypoint.sh Dockerfile
  docker-compose.yaml .dockerignore .pre-commit-config.yaml
)
for f in "${required_files[@]}"; do
  if [[ -e "$PROJECT_DIR/$f" ]]; then
    pass "$f exists"
  else
    fail "$f" "Required file not found"
  fi
done
echo
# ── 9. Symlinks — AGENTS.md and GEMINI.md → CLAUDE.md ────────────────────────
# Both agent docs must be actual symlinks to CLAUDE.md (relative target).
echo "── 9. Symlinks ──"
for symfile in AGENTS.md GEMINI.md; do
  link_path="$PROJECT_DIR/$symfile"
  if [[ -L "$link_path" ]]; then
    link_target=$(readlink "$link_path")
    case "$link_target" in
      CLAUDE.md|./CLAUDE.md)
        pass "$symfile is symlink to CLAUDE.md"
        ;;
      *)
        fail "$symfile symlink" "Points to '$link_target' instead of CLAUDE.md"
        ;;
    esac
  elif [[ -f "$link_path" ]]; then
    fail "$symfile" "Exists but is not a symlink — must be symlink to CLAUDE.md"
  else
    fail "$symfile" "Not found — must be symlink to CLAUDE.md"
  fi
done
echo
# ── 10. Skills exist ─────────────────────────────────────────────────────────
# At least one skills/*/SKILL.md must exist. find's stderr is silenced so a
# missing skills/ directory falls through to the distinct failure below.
echo "── 10. Skills ──"
SKILL_FILES=$(find "$PROJECT_DIR/skills" -name "SKILL.md" -type f 2>/dev/null || true)
if [[ -n "$SKILL_FILES" ]]; then
# wc -l counts newline-separated paths — one per SKILL.md found.
SKILL_COUNT=$(echo "$SKILL_FILES" | wc -l)
pass "Found $SKILL_COUNT SKILL.md file(s) in skills/"
else
if [[ -d "$PROJECT_DIR/skills" ]]; then
fail "Skills" "skills/ directory exists but no SKILL.md found (expected skills/*/SKILL.md)"
else
fail "Skills" "skills/ directory not found"
fi
fi
echo
# ── 11. Hooks exist ──────────────────────────────────────────────────────────
# The hook manifest plus the three required hook scripts must be present.
echo "── 11. Hooks ──"
for hook_file in \
"hooks/hooks.json" \
"hooks/scripts/sync-env.sh" \
"hooks/scripts/fix-env-perms.sh" \
"hooks/scripts/ensure-ignore-files.sh"; do
if [[ -f "$PROJECT_DIR/$hook_file" ]]; then
pass "$hook_file exists"
else
fail "$hook_file" "Required hook file not found"
fi
done
echo
# ── 12. Hook scripts executable ──────────────────────────────────────────────
# Every hook shell script must carry the executable bit (-x).
echo "── 12. Hook scripts executable ──"
if [[ -d "$PROJECT_DIR/hooks/scripts" ]]; then
HOOK_SCRIPTS=$(find "$PROJECT_DIR/hooks/scripts" -name "*.sh" -type f 2>/dev/null || true)
if [[ -n "$HOOK_SCRIPTS" ]]; then
# Here-string keeps the while loop in the current shell, so PASS/FAIL
# counter updates made by pass()/fail() persist (unlike a pipeline).
while IFS= read -r script; do
BASENAME=$(basename "$script")
if [[ -x "$script" ]]; then
pass "hooks/scripts/$BASENAME is executable"
else
fail "hooks/scripts/$BASENAME" "Not executable — run: chmod +x hooks/scripts/$BASENAME"
fi
done <<< "$HOOK_SCRIPTS"
else
warn "Hook scripts" "No .sh files found in hooks/scripts/"
fi
else
warn "Hook scripts" "hooks/scripts/ directory not found"
fi
echo
# ── 13. docker-compose.yaml checks ───────────────────────────────────────────
# Conventions: all runtime config comes from env_file: .env, the service runs
# as a non-root user:, and no inline environment: block is allowed.
# (^\s+ anchors each grep to an indented YAML key; \s is a GNU grep extension.)
echo "── 13. docker-compose.yaml ──"
COMPOSE="$PROJECT_DIR/docker-compose.yaml"
if [[ -f "$COMPOSE" ]]; then
# env_file: .env present
if grep -qE '^\s+env_file:' "$COMPOSE"; then
pass "docker-compose.yaml has env_file directive"
else
fail "docker-compose.yaml env_file" "No env_file: directive — services need env_file: .env"
fi
# user: directive present
if grep -qE '^\s+user:' "$COMPOSE"; then
pass "docker-compose.yaml has user: directive"
else
fail "docker-compose.yaml user" "No user: directive — must set user: for non-root execution"
fi
# NO environment: block
if grep -qE '^\s+environment:' "$COMPOSE"; then
fail "docker-compose.yaml environment" "Found 'environment:' block — all config must come from env_file: .env only"
else
pass "docker-compose.yaml has no environment: block"
fi
else
warn "docker-compose.yaml" "File not found — skipping compose checks"
fi
echo
# ── 14. SWAG config ──────────────────────────────────────────────────────────
# A reverse-proxy config (*.subdomain.conf) must ship at the repo root;
# -maxdepth 1 restricts the search to the top level.
echo "── 14. SWAG config ──"
SWAG_FILES=$(find "$PROJECT_DIR" -maxdepth 1 -name "*.subdomain.conf" -type f 2>/dev/null || true)
if [[ -n "$SWAG_FILES" ]]; then
SWAG_COUNT=$(echo "$SWAG_FILES" | wc -l)
pass "Found $SWAG_COUNT .subdomain.conf file(s)"
else
fail "SWAG config" "No *.subdomain.conf found at repo root"
fi
echo
# ── 15. No committed secrets ─────────────────────────────────────────────────
# .env must never be tracked; `git ls-files .env` prints it only if committed.
echo "── 15. No committed secrets ──"
if git -C "$PROJECT_DIR" rev-parse --is-inside-work-tree >/dev/null 2>&1; then
TRACKED_ENV=$(git -C "$PROJECT_DIR" ls-files .env 2>/dev/null || true)
if [[ -n "$TRACKED_ENV" ]]; then
fail "Committed .env" ".env is tracked in git — remove with: git rm --cached .env"
else
pass "No .env tracked in git"
fi
else
warn "Committed secrets" "Not a git repo — cannot check git ls-files"
fi
echo
# ── 16. Directories exist ────────────────────────────────────────────────────
# Required runtime/test directories, then .gitkeep placeholders (warning only)
# that keep the empty runtime dirs tracked in git.
echo "── 16. Required directories ──"
for req_dir in backups logs tests skills; do
  if [[ -d "$PROJECT_DIR/$req_dir" ]]; then
    pass "$req_dir/ exists"
  else
    fail "$req_dir/" "Required directory not found"
  fi
done
# Check .gitkeep files
for placeholder in backups/.gitkeep logs/.gitkeep; do
  if [[ -f "$PROJECT_DIR/$placeholder" ]]; then
    pass "$placeholder exists"
  else
    warn "$placeholder" "Missing — add empty .gitkeep to track empty directory"
  fi
done
echo
# ── 17. assets/ directory ────────────────────────────────────────────────────
# Codex install surfaces need at least one icon/image at the top of assets/.
echo "── 17. Assets directory ──"
if [[ -d "$PROJECT_DIR/assets" ]]; then
ICON_FILES=$(find "$PROJECT_DIR/assets" -maxdepth 1 -type f \( -name "*.png" -o -name "*.svg" -o -name "*.ico" -o -name "*.jpg" -o -name "*.jpeg" -o -name "*.webp" \) 2>/dev/null || true)
if [[ -n "$ICON_FILES" ]]; then
# wc -l counts newline-separated paths — one per image file.
ICON_COUNT=$(echo "$ICON_FILES" | wc -l)
pass "assets/ directory has $ICON_COUNT icon/image file(s)"
else
fail "assets/ icons" "assets/ directory exists but contains no icon files (png, svg, ico, jpg, webp)"
fi
else
fail "assets/" "assets/ directory not found — required for Codex install surfaces"
fi
echo
# ── Summary ───────────────────────────────────────────────────────────────────
# Print the tallies; exit 0 iff no required check failed (warnings are OK).
echo "════════════════════════════════════════════════════════"
echo "Results: $PASS passed, $FAIL failed, $WARN warnings"
echo "════════════════════════════════════════════════════════"
if [[ "$FAIL" -eq 0 ]]; then
  echo "PLUGIN LINT PASSED"
  exit 0
else
  echo "PLUGIN LINT FAILED"
  exit 1
fi