feat: add bin/bump-version.sh with bats tests, rename scripts/ to bin/

- bin/bump-version.sh: one-command version bump across all four files;
  supports explicit version or major/minor/patch keywords; uses
  CLAUDE_PLUGIN_ROOT when set (hook context), dirname fallback otherwise
- tests/test_bump_version.bats: 9 bats tests covering all bump modes,
  all-files-in-sync, output format, and dirname fallback
- scripts/ renamed to bin/
- Bump 1.3.5 → 1.3.6
This commit is contained in:
Jacob Magar 2026-04-05 19:09:48 -04:00
parent 93a4ca08c6
commit e27ad5e1fc
13 changed files with 1881 additions and 4 deletions

View file

@ -1,7 +1,7 @@
{
"name": "unraid-mcp",
"displayName": "Unraid MCP",
"version": "1.3.4",
"version": "1.3.6",
"description": "Query, monitor, and manage Unraid servers via GraphQL API through MCP tools. Supports system info, Docker, VMs, array/parity, notifications, plugins, rclone, and live telemetry.",
"author": {
"name": "Jacob Magar",

View file

@ -1,6 +1,6 @@
{
"name": "unraid-mcp",
"version": "1.3.4",
"version": "1.3.6",
"description": "Unraid server management via MCP.",
"homepage": "https://github.com/jmagar/unraid-mcp",
"repository": "https://github.com/jmagar/unraid-mcp",

View file

@ -7,6 +7,20 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
## [1.3.6] - 2026-04-05
### Added
- **`tests/test_bump_version.bats`**: 9 bats tests for `bin/bump-version.sh` covering explicit version, patch/minor/major keywords, all-files-in-sync, output format, and dirname fallback.
### Changed
- **`bin/bump-version.sh`**: Uses `CLAUDE_PLUGIN_ROOT` as repo root override (set automatically by plugin runtime in hook contexts); falls back to dirname detection for direct dev use.
- **`bin/`**: Renamed from `scripts/` — moved all scripts to `bin/`.
## [1.3.5] - 2026-04-05
### Added
- **`scripts/bump-version.sh`**: One-command version bump across all four version-bearing files. Supports explicit version or `major`/`minor`/`patch` keywords.
## [1.3.4] - 2026-04-05
### Changed

61
bin/bump-version.sh Executable file
View file

@ -0,0 +1,61 @@
#!/usr/bin/env bash
# bump-version.sh — update version in all version-bearing files atomically.
#
# Usage:
#   ./bin/bump-version.sh 1.3.5
#   ./bin/bump-version.sh patch   # auto-increment patch
#   ./bin/bump-version.sh minor   # auto-increment minor
#   ./bin/bump-version.sh major   # auto-increment major
set -euo pipefail

# Repo root: CLAUDE_PLUGIN_ROOT is set by the plugin runtime in hook contexts;
# otherwise resolve relative to this script's own location (bin/..).
REPO_ROOT="${CLAUDE_PLUGIN_ROOT:-$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)}"

# Every file that carries the project version and must stay in sync.
VERSION_FILES=(
  "${REPO_ROOT}/pyproject.toml"
  "${REPO_ROOT}/.claude-plugin/plugin.json"
  "${REPO_ROOT}/.codex-plugin/plugin.json"
  "${REPO_ROOT}/.gemini-extension.json"
)

# Resolve gemini path (handles both naming conventions)
if [ -f "${REPO_ROOT}/gemini-extension.json" ]; then
  VERSION_FILES[3]="${REPO_ROOT}/gemini-extension.json"
fi
# Read the canonical current version out of the Claude plugin manifest.
# Prints MAJOR.MINOR.PATCH on stdout.
current_version() {
  local manifest="${REPO_ROOT}/.claude-plugin/plugin.json"
  # Print only the first line whose "version": "..." entry matches.
  sed -n 's/.*"version": "\(.*\)".*/\1/p' "$manifest" | head -n 1
}
# Increment one component of a semver string.
#   $1 - current version (MAJOR.MINOR.PATCH)
#   $2 - part to bump: major | minor | patch
# Prints the new version on stdout; returns 1 on an unknown part.
bump() {
  local version="$1" part="$2"
  local major minor patch
  IFS='.' read -r major minor patch <<< "$version"
  case "$part" in
    major) echo "$((major + 1)).0.0" ;;
    minor) echo "${major}.$((minor + 1)).0" ;;
    patch) echo "${major}.${minor}.$((patch + 1))" ;;
    *)
      # Previously an unknown part fell through the case silently and printed
      # nothing; fail loudly so a caller can't proceed with an empty version.
      echo "bump: unknown part '$part'" >&2
      return 1
      ;;
  esac
}
# Resolve new version from the CLI argument.
ARG="${1:-}"
CURRENT="$(current_version)"
case "$ARG" in
  major|minor|patch) NEW="$(bump "$CURRENT" "$ARG")" ;;
  "") echo "Usage: $0 <version|major|minor|patch>"; exit 1 ;;
  *)
    # Explicit version: validate the MAJOR.MINOR.PATCH shape up front so a
    # typo can't leave the manifests half-updated with a malformed version.
    if [[ ! "$ARG" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
      echo "Invalid version '$ARG' — expected MAJOR.MINOR.PATCH" >&2
      exit 1
    fi
    NEW="$ARG"
    ;;
esac

# Fix: the message previously printed "$CURRENT$NEW" with no separator.
echo "Bumping $CURRENT → $NEW"
for file in "${VERSION_FILES[@]}"; do
  [ -f "$file" ] || { echo " skip (not found): $file"; continue; }
  # JSON manifests use `"version": "X"`; pyproject.toml uses `version = "X"`.
  # Both substitutions run on every file; the non-matching one is a no-op.
  # NOTE(review): GNU `sed -i`; BSD/macOS sed needs `sed -i ''` — confirm target platforms.
  sed -i "s/\"version\": \"${CURRENT}\"/\"version\": \"${NEW}\"/" "$file"
  sed -i "s/^version = \"${CURRENT}\"/version = \"${NEW}\"/" "$file"
  echo " updated: ${file#"${REPO_ROOT}/"}"
done

echo "Done. Don't forget to add a CHANGELOG.md entry for ${NEW}."

145
bin/check-docker-security.sh Executable file
View file

@ -0,0 +1,145 @@
#!/usr/bin/env bash
# check-docker-security.sh — Verify Dockerfile follows plugin security conventions
# Run standalone: bash bin/check-docker-security.sh [path/to/Dockerfile]
# Run in pre-commit: add as a hook (see .pre-commit-config.yaml example in plugin-setup-guide)
#
# Checks:
#   1. Multi-stage build (separate builder + runtime stages)
#   2. Non-root user (USER 1000:1000 or ${PUID}:${PGID})
#   3. No sensitive ENV directives baked into the image
#   4. HEALTHCHECK present
set -euo pipefail

# Running tallies for the summary line.
PASS=0
FAIL=0
WARN=0

# Result helpers: label in $1, detail in $2 (pass takes only a label).
# Fix: the original printed "$1$2" with no separator, producing run-together
# output like "Multi-stage buildFound 1 FROM…"; join with an em dash.
pass() { echo " ✓ PASS: $1"; PASS=$((PASS + 1)); }
fail() { echo " ✗ FAIL: $1 — $2"; FAIL=$((FAIL + 1)); }
warn() { echo " ⚠ WARN: $1 — $2"; WARN=$((WARN + 1)); }
# Find Dockerfile
DOCKERFILE="${1:-Dockerfile}"
if [[ ! -f "$DOCKERFILE" ]]; then
echo "Error: $DOCKERFILE not found" >&2
exit 1
fi
echo "=== Docker Security Check: $DOCKERFILE ==="
# ── 1. Multi-stage build ─────────────────────────────────────────────────────
FROM_COUNT=$(grep -cE '^FROM\s' "$DOCKERFILE" || true)
if [[ "$FROM_COUNT" -ge 2 ]]; then
pass "Multi-stage build ($FROM_COUNT stages)"
else
fail "Multi-stage build" "Found $FROM_COUNT FROM directive(s) — need at least 2 (builder + runtime)"
fi
# Check for named stages
if grep -qE '^FROM\s.+\sAS\s+builder' "$DOCKERFILE"; then
pass "Named builder stage"
else
warn "Named builder stage" "No 'FROM ... AS builder' found — recommend naming stages"
fi
if grep -qE '^FROM\s.+\sAS\s+runtime' "$DOCKERFILE"; then
pass "Named runtime stage"
else
warn "Named runtime stage" "No 'FROM ... AS runtime' found — recommend naming stages"
fi
# ── 2. Non-root user ─────────────────────────────────────────────────────────
# Check for USER directive
if grep -qE '^USER\s' "$DOCKERFILE"; then
  # The last USER directive wins at container runtime, so inspect only it.
  USER_LINE=$(grep -E '^USER\s' "$DOCKERFILE" | tail -1)
  # NOTE(review): `\s` in sed is a GNU extension — confirm on BSD sed.
  USER_VALUE=$(echo "$USER_LINE" | sed 's/^USER\s*//')
  # Check for 1000:1000 or variable-based UID:GID
  if echo "$USER_VALUE" | grep -qE '^\$?\{?PUID|1000:1000|1000$'; then
    pass "Non-root user ($USER_VALUE)"
  else
    warn "Non-root user" "USER is '$USER_VALUE' — expected 1000:1000 or \${PUID}:\${PGID}"
  fi
else
  # Check if docker-compose.yaml handles it via user: directive
  # (looks for docker-compose.yaml in the CURRENT directory, not next to $DOCKERFILE)
  if [[ -f "docker-compose.yaml" ]] && grep -qE '^\s+user:' docker-compose.yaml; then
    warn "Non-root user" "No USER in Dockerfile but docker-compose.yaml sets user: — acceptable if always run via compose"
  else
    fail "Non-root user" "No USER directive found — container runs as root"
  fi
fi

# Check there's no USER root after the runtime stage
# Find the line number where the runtime stage begins, then scan from there down.
RUNTIME_START=$(grep -nE '^FROM\s.+\sAS\s+runtime' "$DOCKERFILE" | head -1 | cut -d: -f1 || true)
if [[ -n "$RUNTIME_START" ]]; then
  if tail -n +"$RUNTIME_START" "$DOCKERFILE" | grep -qE '^USER\s+root'; then
    fail "No root in runtime" "USER root found after runtime stage — never run as root in production"
  else
    pass "No root in runtime stage"
  fi
fi
# ── 3. No sensitive ENV baked in ──────────────────────────────────────────────
# Variable-name keywords that indicate a secret.
SENSITIVE_PATTERNS='(API_KEY|TOKEN|SECRET|PASSWORD|CREDENTIAL|PRIVATE_KEY|AUTH)'
BAKED_ENVS=$(grep -nE "^ENV\s+.*${SENSITIVE_PATTERNS}" "$DOCKERFILE" || true)
if [[ -n "$BAKED_ENVS" ]]; then
  fail "No baked secrets" "Sensitive ENV directives found in Dockerfile:"
  echo "$BAKED_ENVS" | while IFS= read -r line; do
    echo " $line"
  done
else
  pass "No baked secrets in ENV directives"
fi

# Check for ARG with defaults that look like secrets
# (ARG defaults persist in `docker history` even though ARG is not in the final image)
BAKED_ARGS=$(grep -nE "^ARG\s+.*${SENSITIVE_PATTERNS}.*=" "$DOCKERFILE" || true)
if [[ -n "$BAKED_ARGS" ]]; then
  warn "No baked ARG secrets" "ARG with sensitive defaults found (may leak via docker history):"
  echo "$BAKED_ARGS" | while IFS= read -r line; do
    echo " $line"
  done
else
  pass "No baked secrets in ARG defaults"
fi

# ── 4. HEALTHCHECK ────────────────────────────────────────────────────────────
if grep -qE '^HEALTHCHECK\s' "$DOCKERFILE"; then
  pass "HEALTHCHECK directive present"
  # Heuristic: /health anywhere in the Dockerfile, not just on the HEALTHCHECK line.
  if grep -qE '/health' "$DOCKERFILE"; then
    pass "HEALTHCHECK uses /health endpoint"
  else
    warn "HEALTHCHECK endpoint" "HEALTHCHECK doesn't reference /health — ensure it matches your health endpoint"
  fi
else
  warn "HEALTHCHECK" "No HEALTHCHECK in Dockerfile — relying on docker-compose healthcheck only"
fi
# ── 5. Dependency layer caching ───────────────────────────────────────────────
# Check that manifest files are copied before source (for layer caching)
COPY_LINES=$(grep -nE '^COPY\s' "$DOCKERFILE" || true)
FIRST_MANIFEST_COPY=""
FIRST_SOURCE_COPY=""
# Walk COPY directives in file order, recording the first manifest copy and
# the first source-tree copy. The herestring keeps the loop in this shell so
# the two variables survive (a pipe would run the loop in a subshell).
while IFS= read -r line; do
  linenum=$(echo "$line" | cut -d: -f1)
  content=$(echo "$line" | cut -d: -f2-)
  if echo "$content" | grep -qE '(pyproject\.toml|package.*\.json|Cargo\.(toml|lock)|go\.(mod|sum)|uv\.lock)'; then
    [[ -z "$FIRST_MANIFEST_COPY" ]] && FIRST_MANIFEST_COPY="$linenum"
  elif echo "$content" | grep -qE '\.\s+\.|src/|lib/'; then
    [[ -z "$FIRST_SOURCE_COPY" ]] && FIRST_SOURCE_COPY="$linenum"
  fi
done <<< "$COPY_LINES"
# Only meaningful when both kinds of COPY exist.
if [[ -n "$FIRST_MANIFEST_COPY" && -n "$FIRST_SOURCE_COPY" ]]; then
  if [[ "$FIRST_MANIFEST_COPY" -lt "$FIRST_SOURCE_COPY" ]]; then
    pass "Dependency manifest copied before source (layer caching)"
  else
    warn "Layer caching" "Source copied before dependency manifest — swap order for better Docker layer caching"
  fi
fi
# ── Summary ───────────────────────────────────────────────────────────────────
# Report tallies; any hard failure makes the whole check fail (exit 1).
echo
echo "Results: $PASS passed, $FAIL failed, $WARN warnings"
if [[ "$FAIL" -eq 0 ]]; then
  echo "DOCKER SECURITY CHECK PASSED"
  exit 0
fi
echo "DOCKER SECURITY CHECK FAILED"
exit 1

138
bin/check-no-baked-env.sh Executable file
View file

@ -0,0 +1,138 @@
#!/usr/bin/env bash
# check-no-baked-env.sh — Verify env vars aren't baked into Docker artifacts
# Run standalone: bash bin/check-no-baked-env.sh [project-dir]
# Run in pre-commit: add as a hook (see .pre-commit-config.yaml example in plugin-setup-guide)
#
# Checks:
#   1. docker-compose.yaml has no `environment:` block (all config via env_file only)
#   2. Dockerfile has no ENV with real/sensitive values
#   3. No hardcoded URLs, tokens, or credentials in Dockerfile or docker-compose.yaml
set -euo pipefail

# Directory to inspect (defaults to the current directory).
PROJECT_DIR="${1:-.}"

# Running tallies for the summary line.
PASS=0
FAIL=0
WARN=0

# Result helpers: label in $1, detail in $2 (pass takes only a label).
# Fix: the original printed "$1$2" with no separator, running label and
# detail together; join with an em dash.
pass() { echo " ✓ PASS: $1"; PASS=$((PASS + 1)); }
fail() { echo " ✗ FAIL: $1 — $2"; FAIL=$((FAIL + 1)); }
warn() { echo " ⚠ WARN: $1 — $2"; WARN=$((WARN + 1)); }
echo "=== No Baked Env Vars Check: $PROJECT_DIR ==="

# ── 1. docker-compose.yaml — no environment: block ───────────────────────────
COMPOSE_FILE="$PROJECT_DIR/docker-compose.yaml"
if [[ -f "$COMPOSE_FILE" ]]; then
  # Check for environment: key under services
  if grep -qE '^\s+environment:' "$COMPOSE_FILE"; then
    fail "No environment: block in docker-compose.yaml" \
      "Found 'environment:' block — all env vars must come from env_file: .env only"
    echo " Offending lines:"
    grep -nE '^\s+environment:|^\s+-\s+\w+=' "$COMPOSE_FILE" | head -10 | while IFS= read -r line; do
      echo " $line"
    done
    echo
    echo " Fix: Remove the environment: block entirely."
    echo " Add all variables to .env and .env.example instead."
    echo " docker-compose.yaml should only use 'env_file: .env'"
  else
    pass "No environment: block in docker-compose.yaml"
  fi
  # Verify env_file is present
  if grep -qE '^\s+env_file:' "$COMPOSE_FILE"; then
    pass "env_file: directive present"
  else
    fail "env_file: directive" "No env_file: found — services won't receive credentials"
  fi
  # Check for hardcoded values in compose environment blocks (not variable references)
  # Filter: lines that set KEY=VALUE where VALUE doesn't start with $ (variable ref)
  HARDCODED=$(grep -nE '^\s+-\s+\w+=[^$]' "$COMPOSE_FILE" | grep -vE '=(true|false)$' || true)
  if [[ -n "$HARDCODED" ]]; then
    # Filter out known safe patterns
    # NOTE(review): these keys (build:, image:, restart:, …) are compose mapping
    # keys and cannot appear on "- KEY=VALUE" list items, so this filter looks
    # like a no-op — confirm what it was meant to exclude.
    SUSPICIOUS=$(echo "$HARDCODED" | grep -vE '(build:|image:|container_name:|restart:|test:|interval:|timeout:|retries:|start_period:|memory:|cpus:|name:)' || true)
    if [[ -n "$SUSPICIOUS" ]]; then
      warn "Hardcoded values in compose" "Found potentially hardcoded values:"
      echo "$SUSPICIOUS" | head -5 | while IFS= read -r line; do
        echo " $line"
      done
    fi
  fi
else
  warn "docker-compose.yaml" "File not found at $COMPOSE_FILE — skipping compose checks"
fi
# ── 2. Dockerfile — no sensitive ENV values ───────────────────────────────────
DOCKERFILE="$PROJECT_DIR/Dockerfile"
if [[ -f "$DOCKERFILE" ]]; then
  # Sensitive patterns that should never be in ENV
  SENSITIVE_RE='(API_KEY|TOKEN|SECRET|PASSWORD|CREDENTIAL|PRIVATE_KEY|AUTH_TOKEN|BEARER)'
  # Check ENV directives for sensitive variable names with values
  SENSITIVE_ENVS=$(grep -nE "^ENV\s+\S*${SENSITIVE_RE}\S*\s*=" "$DOCKERFILE" || true)
  if [[ -n "$SENSITIVE_ENVS" ]]; then
    fail "No sensitive ENV in Dockerfile" "Found ENV directives with sensitive variable names:"
    echo "$SENSITIVE_ENVS" | while IFS= read -r line; do
      echo " $line"
    done
  else
    pass "No sensitive ENV in Dockerfile"
  fi
  # Check for ENV with hardcoded URLs (might contain credentials)
  URL_ENVS=$(grep -nE '^ENV\s+\S+\s*=\s*https?://' "$DOCKERFILE" || true)
  if [[ -n "$URL_ENVS" ]]; then
    warn "Hardcoded URLs in ENV" "Found ENV with hardcoded URLs (may contain credentials):"
    echo "$URL_ENVS" | while IFS= read -r line; do
      echo " $line"
    done
  else
    pass "No hardcoded URLs in ENV"
  fi
  # Check for COPY .env into image
  if grep -qE '^COPY\s+.*\.env\s' "$DOCKERFILE"; then
    fail "No .env in image" "Dockerfile copies .env into the image — credentials will be baked in"
  else
    pass "No .env copied into image"
  fi
  # Check .dockerignore excludes .env
  # (.dockerignore keeps .env out of the build context entirely)
  DOCKERIGNORE="$PROJECT_DIR/.dockerignore"
  if [[ -f "$DOCKERIGNORE" ]]; then
    if grep -qE '^\s*\.env\s*$' "$DOCKERIGNORE"; then
      pass ".dockerignore excludes .env"
    else
      fail ".dockerignore" ".env not excluded — secrets may leak into build context"
    fi
  else
    warn ".dockerignore" "File not found — create one that excludes .env"
  fi
else
  warn "Dockerfile" "File not found at $DOCKERFILE — skipping Dockerfile checks"
fi
# ── 3. entrypoint.sh — no hardcoded credentials ──────────────────────────────
ENTRYPOINT="$PROJECT_DIR/entrypoint.sh"
# Suspicious assignments like password='...' / token="..." whose quoted value
# does not start with '$' (i.e. is not a variable reference).
# Fix: the original pattern used ["\x27] for the quote class, but grep -E has
# no \x hex escapes — that bracket expression matched the literal characters
# \ x 2 7 instead of an apostrophe. Spell out both quote characters.
CRED_PATTERNS="(password|secret|token|api.key)[[:space:]]*=[[:space:]]*[\"'][^\$]"
if [[ -f "$ENTRYPOINT" ]]; then
  HARDCODED_CREDS=$(grep -inE "$CRED_PATTERNS" "$ENTRYPOINT" || true)
  if [[ -n "$HARDCODED_CREDS" ]]; then
    fail "No hardcoded creds in entrypoint.sh" "Found suspicious hardcoded values:"
    echo "$HARDCODED_CREDS" | while IFS= read -r line; do
      echo " $line"
    done
  else
    pass "No hardcoded credentials in entrypoint.sh"
  fi
else
  # entrypoint.sh is optional
  true
fi
# ── Summary ───────────────────────────────────────────────────────────────────
# Report tallies; any hard failure makes the whole check fail (exit 1).
echo
echo "Results: $PASS passed, $FAIL failed, $WARN warnings"
if [[ "$FAIL" -eq 0 ]]; then
  echo "NO BAKED ENV CHECK PASSED"
  exit 0
fi
echo "NO BAKED ENV CHECK FAILED"
exit 1

181
bin/check-outdated-deps.sh Executable file
View file

@ -0,0 +1,181 @@
#!/usr/bin/env bash
# check-outdated-deps.sh — Report outdated dependencies for Python/TypeScript/Rust projects
# Run standalone: bash bin/check-outdated-deps.sh [project-dir]
#
# Auto-detects language from manifest files and reports outdated packages.
# Exit code: 0 = all current, 1 = outdated found, 2 = tool error
#
# Not recommended for pre-commit (requires network, slow). Run periodically or in CI.
set -euo pipefail

# Directory to inspect (defaults to the current directory).
PROJECT_DIR="${1:-.}"
# Set to 1 whenever anything stale or vulnerable is detected.
FOUND_OUTDATED=0
# Number of recognized manifests processed; 0 at the end → exit 2.
CHECKED=0

echo "=== Outdated Dependencies Check: $PROJECT_DIR ==="
echo
# ── Python (uv) ──────────────────────────────────────────────────────────────
if [[ -f "$PROJECT_DIR/pyproject.toml" ]]; then
  CHECKED=$((CHECKED + 1))
  echo "── Python (uv) ──"
  if command -v uv &>/dev/null; then
    # Check if lock file is current
    if [[ -f "$PROJECT_DIR/uv.lock" ]]; then
      if (cd "$PROJECT_DIR" && uv lock --check 2>/dev/null); then
        echo " ✓ uv.lock is up to date"
      else
        echo " ⚠ uv.lock is out of sync with pyproject.toml — run 'uv lock'"
        FOUND_OUTDATED=1
      fi
    fi
    # Show outdated packages
    echo " Checking for outdated packages..."
    OUTDATED=$(cd "$PROJECT_DIR" && uv pip list --outdated 2>/dev/null || true)
    if [[ -n "$OUTDATED" && "$OUTDATED" != *"No outdated packages"* ]]; then
      LINE_COUNT=$(echo "$OUTDATED" | wc -l)
      if [[ "$LINE_COUNT" -gt 2 ]]; then # Header lines
        echo "$OUTDATED" | head -20
        FOUND_OUTDATED=1
      else
        echo " ✓ All Python packages are current"
      fi
    else
      echo " ✓ All Python packages are current"
    fi
    # Check pyproject.toml for pinned versions that may be outdated.
    # Fix: the pattern previously ended in `==\d`, but POSIX/GNU grep -E has no
    # \d escape (it matched a literal 'd'), so real pins like ==1.2.3 were
    # never reported. Use an explicit [0-9] class.
    echo " Checking pyproject.toml dependency pins..."
    PINNED=$(grep -E '^\s*"[^"]+==[0-9]' "$PROJECT_DIR/pyproject.toml" 2>/dev/null || true)
    if [[ -n "$PINNED" ]]; then
      echo " ⚠ Found exact-pinned dependencies (consider using >= or ~=):"
      echo "$PINNED" | head -10 | while IFS= read -r line; do
        echo " $line"
      done
    fi
  else
    echo " ⚠ uv not found — install with: curl -LsSf https://astral.sh/uv/install.sh | sh"
  fi
  echo
fi
# ── TypeScript / JavaScript (npm) ────────────────────────────────────────────
if [[ -f "$PROJECT_DIR/package.json" ]]; then
  CHECKED=$((CHECKED + 1))
  echo "── TypeScript / JavaScript ──"
  if command -v npm &>/dev/null; then
    echo " Checking for outdated packages..."
    # npm outdated exits non-zero when anything is outdated; `|| true` keeps going.
    OUTDATED=$(cd "$PROJECT_DIR" && npm outdated --json 2>/dev/null || true)
    if [[ -n "$OUTDATED" && "$OUTDATED" != "{}" ]]; then
      # Parse JSON output for readable display
      echo "$OUTDATED" | python3 -c "
import json, sys
try:
    data = json.load(sys.stdin)
    if data:
        print(f' Found {len(data)} outdated package(s):')
        print(f' {\"Package\":<30} {\"Current\":<15} {\"Wanted\":<15} {\"Latest\":<15}')
        print(f' {\"─\"*30} {\"─\"*15} {\"─\"*15} {\"─\"*15}')
        for pkg, info in sorted(data.items()):
            current = info.get('current', '?')
            wanted = info.get('wanted', '?')
            latest = info.get('latest', '?')
            marker = ' ← MAJOR' if current.split('.')[0] != latest.split('.')[0] else ''
            print(f' {pkg:<30} {current:<15} {wanted:<15} {latest:<15}{marker}')
except (json.JSONDecodeError, KeyError):
    print(' ⚠ Could not parse npm outdated output')
" 2>/dev/null || echo " ⚠ Could not parse npm outdated output"
      FOUND_OUTDATED=1
    else
      echo " ✓ All npm packages are current"
    fi
    # Check for npm audit vulnerabilities
    echo " Checking for known vulnerabilities..."
    AUDIT=$(cd "$PROJECT_DIR" && npm audit --json 2>/dev/null || true)
    # Sum every severity bucket except informational findings.
    VULN_COUNT=$(echo "$AUDIT" | python3 -c "
import json, sys
try:
    data = json.load(sys.stdin)
    total = data.get('metadata', {}).get('vulnerabilities', {})
    count = sum(v for k, v in total.items() if k != 'info')
    print(count)
except:
    print(0)
" 2>/dev/null || echo "0")
    if [[ "$VULN_COUNT" -gt 0 ]]; then
      echo " ⚠ Found $VULN_COUNT known vulnerabilities — run 'npm audit' for details"
    else
      echo " ✓ No known vulnerabilities"
    fi
  else
    echo " ⚠ npm not found"
  fi
  echo
fi
# ── Rust (cargo) ──────────────────────────────────────────────────────────────
if [[ -f "$PROJECT_DIR/Cargo.toml" ]]; then
  CHECKED=$((CHECKED + 1))
  echo "── Rust (cargo) ──"
  if command -v cargo &>/dev/null; then
    # Check if cargo-outdated is installed
    # (&> already covers stderr; the trailing 2>&1 is redundant but harmless)
    if cargo outdated --version &>/dev/null 2>&1; then
      echo " Checking for outdated crates..."
      OUTDATED=$(cd "$PROJECT_DIR" && cargo outdated --root-deps-only 2>/dev/null || true)
      # Any line beginning with a word character means at least one crate was listed.
      if echo "$OUTDATED" | grep -qE '^\w'; then
        echo "$OUTDATED" | head -20
        FOUND_OUTDATED=1
      else
        echo " ✓ All Rust crates are current"
      fi
    else
      echo " ⚠ cargo-outdated not installed — install with: cargo install cargo-outdated"
      echo " Falling back to Cargo.lock age check..."
      if [[ -f "$PROJECT_DIR/Cargo.lock" ]]; then
        # NOTE(review): `stat -c %Y` is GNU coreutils; BSD/macOS needs
        # `stat -f %m` — confirm the platforms this runs on.
        LOCK_AGE_DAYS=$(( ($(date +%s) - $(stat -c %Y "$PROJECT_DIR/Cargo.lock")) / 86400 ))
        if [[ "$LOCK_AGE_DAYS" -gt 30 ]]; then
          echo " ⚠ Cargo.lock is $LOCK_AGE_DAYS days old — consider running 'cargo update'"
        else
          echo " ✓ Cargo.lock updated within last 30 days ($LOCK_AGE_DAYS days ago)"
        fi
      fi
    fi
    # Check for cargo audit
    if cargo audit --version &>/dev/null 2>&1; then
      echo " Checking for known vulnerabilities..."
      if (cd "$PROJECT_DIR" && cargo audit --quiet 2>/dev/null); then
        echo " ✓ No known vulnerabilities"
      else
        echo " ⚠ Vulnerabilities found — run 'cargo audit' for details"
        FOUND_OUTDATED=1
      fi
    else
      echo " ⚠ cargo-audit not installed — install with: cargo install cargo-audit"
    fi
  else
    echo " ⚠ cargo not found"
  fi
  echo
fi
# ── Summary ───────────────────────────────────────────────────────────────────
# Nothing recognized at all is a tool/usage error, not a clean pass.
if [[ "$CHECKED" -eq 0 ]]; then
  echo "No recognized project manifests found (pyproject.toml, package.json, Cargo.toml)"
  exit 2
fi

echo "=== Summary ==="
if [[ "$FOUND_OUTDATED" -ne 0 ]]; then
  echo "Outdated dependencies found. Review above and update as needed."
  exit 1
fi
echo "All dependencies are current across $CHECKED project(s)."
exit 0

267
bin/ensure-ignore-files.sh Executable file
View file

@ -0,0 +1,267 @@
#!/usr/bin/env bash
# ensure-ignore-files.sh — Ensure .gitignore and .dockerignore have all required patterns
#
# Modes:
#   (default)  Append missing patterns to the files (SessionStart hook)
#   --check    Report missing patterns and exit non-zero if any are missing (pre-commit/CI)
#
# Usage:
#   bash bin/ensure-ignore-files.sh [--check] [project-dir]
#
# As a plugin hook:
#   "command": "${CLAUDE_PLUGIN_ROOT}/hooks/scripts/ensure-ignore-files.sh"
set -euo pipefail

# --check → report-only mode; default mode appends missing patterns in place.
CHECK_MODE=false
if [[ "${1:-}" == "--check" ]]; then
  CHECK_MODE=true
  shift
fi

PROJECT_DIR="${1:-${CLAUDE_PLUGIN_ROOT:-.}}"

# Running tallies for the summary line.
PASS=0
FAIL=0
WARN=0

# Result helpers. pass/warn stay quiet outside --check mode (hook mode should
# only mutate files silently); fail always prints.
# Fix: fail/warn previously printed "$1$2" with no separator, running label
# and detail together; join with an em dash.
pass() { PASS=$((PASS + 1)); if $CHECK_MODE; then echo "$1"; fi; }
fail() { FAIL=$((FAIL + 1)); echo " ✗ FAIL: $1 — $2"; }
warn() { WARN=$((WARN + 1)); if $CHECK_MODE; then echo " ⚠ WARN: $1 — $2"; fi; }
# Ensure one literal line exists in an ignore file.
#   $1 - ignore-file path, $2 - exact pattern line, $3 - label for reporting
# In --check mode a missing pattern counts as a failure; otherwise it is
# appended to the file.
ensure_pattern() {
  local target="$1" want="$2" tag="$3"
  # -x matches the whole line, -F treats the pattern literally (ignore-file
  # entries contain regex metacharacters like '*' and '.').
  if grep -qxF "$want" "$target" 2>/dev/null; then
    pass "$tag: '$want'"
    return
  fi
  if $CHECK_MODE; then
    fail "$tag: '$want'" "missing"
    return
  fi
  echo "$want" >> "$target"
  pass "$tag: '$want' (added)"
}
# ═══════════════════════════════════════════════════════════════════════════════
# .gitignore — full required pattern list from plugin-setup-guide
# ═══════════════════════════════════════════════════════════════════════════════
GITIGNORE="$PROJECT_DIR/.gitignore"
if $CHECK_MODE; then echo "=== Ignore Files Check: $PROJECT_DIR ==="; echo "── .gitignore ──"; fi
if [[ ! -f "$GITIGNORE" ]] && $CHECK_MODE; then
  fail ".gitignore" "File not found — every plugin repo must have a .gitignore"
else
  # Append mode: create the file if absent, then add any missing patterns.
  touch "$GITIGNORE"
  # ── Secrets ──
  REQUIRED_GIT=(
    ".env"
    ".env.*"
    "!.env.example"
  )
  # ── Runtime / hook artifacts ──
  REQUIRED_GIT+=(
    "*.log"
  )
  # ── Claude Code / AI tooling ──
  REQUIRED_GIT+=(
    ".claude/settings.local.json"
    ".claude/worktrees/"
    ".omc/"
    ".lavra/"
    ".beads/"
    ".serena/"
    ".worktrees"
    ".full-review/"
    ".full-review-archive-*"
  )
  # ── IDE / editor ──
  REQUIRED_GIT+=(
    ".vscode/"
    ".cursor/"
    ".windsurf/"
    ".1code/"
  )
  # ── Caches ──
  REQUIRED_GIT+=(
    ".cache/"
  )
  # ── Documentation artifacts ──
  REQUIRED_GIT+=(
    "docs/plans/"
    "docs/sessions/"
    "docs/reports/"
    "docs/research/"
    "docs/superpowers/"
  )
  for pattern in "${REQUIRED_GIT[@]}"; do
    ensure_pattern "$GITIGNORE" "$pattern" ".gitignore"
  done
  # ── Language-specific (check only, don't auto-add — user must uncomment) ──
  if $CHECK_MODE; then
    if [[ -f "$PROJECT_DIR/pyproject.toml" ]]; then
      echo " Detected: Python project"
      for p in ".venv/" "__pycache__/" "*.py[oc]" "*.egg-info/" "dist/" "build/"; do
        if grep -qxF "$p" "$GITIGNORE" 2>/dev/null; then
          pass ".gitignore (Python): '$p'"
        else
          warn ".gitignore (Python)" "'$p' not found — uncomment Python section"
        fi
      done
    fi
    if [[ -f "$PROJECT_DIR/package.json" ]]; then
      echo " Detected: TypeScript/JavaScript project"
      for p in "node_modules/" "dist/" "build/"; do
        if grep -qxF "$p" "$GITIGNORE" 2>/dev/null; then
          pass ".gitignore (TypeScript): '$p'"
        else
          warn ".gitignore (TypeScript)" "'$p' not found — uncomment TS section"
        fi
      done
    fi
    if [[ -f "$PROJECT_DIR/Cargo.toml" ]]; then
      echo " Detected: Rust project"
      for p in "target/"; do
        if grep -qxF "$p" "$GITIGNORE" 2>/dev/null; then
          pass ".gitignore (Rust): '$p'"
        else
          warn ".gitignore (Rust)" "'$p' not found — uncomment Rust section"
        fi
      done
    fi
    # Verify .env.example is NOT ignored
    # (git applies patterns in order, so '!.env.example' must follow '.env.*')
    if git -C "$PROJECT_DIR" check-ignore .env.example > /dev/null 2>&1; then
      fail ".gitignore" ".env.example is being ignored — '!.env.example' must come after '.env.*'"
    else
      pass ".gitignore: .env.example is tracked (not ignored)"
    fi
  fi
fi
# ═══════════════════════════════════════════════════════════════════════════════
# .dockerignore — full required pattern list from plugin-setup-guide
# ═══════════════════════════════════════════════════════════════════════════════
DOCKERIGNORE="$PROJECT_DIR/.dockerignore"
# Skip if no Dockerfile
if [[ ! -f "$PROJECT_DIR/Dockerfile" ]]; then
  if $CHECK_MODE; then echo; echo "── .dockerignore ──"; echo " No Dockerfile found — skipping"; fi
else
  if $CHECK_MODE; then echo; echo "── .dockerignore ──"; fi
  if [[ ! -f "$DOCKERIGNORE" ]] && $CHECK_MODE; then
    fail ".dockerignore" "File not found — required when Dockerfile exists"
  else
    # Append mode: create the file if absent, then add any missing patterns.
    touch "$DOCKERIGNORE"
    # ── Version control ──
    REQUIRED_DOCKER=(
      ".git"
      ".github"
    )
    # ── Secrets ──
    REQUIRED_DOCKER+=(
      ".env"
      ".env.*"
      "!.env.example"
    )
    # ── Claude Code / AI tooling ──
    REQUIRED_DOCKER+=(
      ".claude"
      ".claude-plugin"
      ".codex-plugin"
      ".omc"
      ".lavra"
      ".beads"
      ".serena"
      ".worktrees"
      ".full-review"
      ".full-review-archive-*"
    )
    # ── IDE / editor ──
    REQUIRED_DOCKER+=(
      ".vscode"
      ".cursor"
      ".windsurf"
      ".1code"
    )
    # ── Docs, tests, scripts — not needed at runtime ──
    REQUIRED_DOCKER+=(
      "docs"
      "tests"
      "scripts"
      "*.md"
      "!README.md"
    )
    # ── Runtime artifacts ──
    REQUIRED_DOCKER+=(
      "logs"
      "backups"
      "*.log"
      ".cache"
    )
    for pattern in "${REQUIRED_DOCKER[@]}"; do
      ensure_pattern "$DOCKERIGNORE" "$pattern" ".dockerignore"
    done
    # ── Language-specific (check only) ──
    if $CHECK_MODE; then
      if [[ -f "$PROJECT_DIR/pyproject.toml" ]]; then
        for p in ".venv" "__pycache__/" "*.py[oc]" "*.egg-info" "dist/"; do
          if grep -qxF "$p" "$DOCKERIGNORE" 2>/dev/null; then
            pass ".dockerignore (Python): '$p'"
          else
            warn ".dockerignore (Python)" "'$p' not found — uncomment Python section"
          fi
        done
      fi
      if [[ -f "$PROJECT_DIR/package.json" ]]; then
        for p in "node_modules/" "dist/" "coverage/"; do
          if grep -qxF "$p" "$DOCKERIGNORE" 2>/dev/null; then
            pass ".dockerignore (TypeScript): '$p'"
          else
            warn ".dockerignore (TypeScript)" "'$p' not found — uncomment TS section"
          fi
        done
      fi
      if [[ -f "$PROJECT_DIR/Cargo.toml" ]]; then
        for p in "target/"; do
          if grep -qxF "$p" "$DOCKERIGNORE" 2>/dev/null; then
            pass ".dockerignore (Rust): '$p'"
          else
            warn ".dockerignore (Rust)" "'$p' not found — uncomment Rust section"
          fi
        done
      fi
    fi
  fi
fi
# ═══════════════════════════════════════════════════════════════════════════════
# Summary
# ═══════════════════════════════════════════════════════════════════════════════
# Only --check mode reports and can exit non-zero; append mode ends quietly.
if $CHECK_MODE; then
  echo
  echo "Results: $PASS passed, $FAIL failed, $WARN warnings"
  if [[ "$FAIL" -eq 0 ]]; then
    echo "IGNORE FILES CHECK PASSED"
    exit 0
  fi
  echo "IGNORE FILES CHECK FAILED"
  exit 1
fi

View file

@ -0,0 +1,860 @@
#!/usr/bin/env python3
"""Generate canonical Unraid GraphQL docs from live introspection."""
from __future__ import annotations
import argparse
import datetime as dt
import json
import os
from collections import Counter, defaultdict
from pathlib import Path
from typing import Any
import httpx
from graphql import build_client_schema, print_schema
# Output locations for the generated documentation artifacts.
DOCS_DIR = Path("docs/unraid")
DEFAULT_COMPLETE_OUTPUT = DOCS_DIR / "UNRAID-API-COMPLETE-REFERENCE.md"  # full schema reference
DEFAULT_SUMMARY_OUTPUT = DOCS_DIR / "UNRAID-API-SUMMARY.md"  # condensed overview
DEFAULT_INTROSPECTION_OUTPUT = DOCS_DIR / "UNRAID-API-INTROSPECTION.json"  # raw introspection JSON
DEFAULT_SCHEMA_OUTPUT = DOCS_DIR / "UNRAID-SCHEMA.graphql"  # SDL dump
DEFAULT_CHANGES_OUTPUT = DOCS_DIR / "UNRAID-API-CHANGES.md"  # presumably a change log vs an earlier snapshot — TODO confirm
# Older output location kept so earlier consumers keep working.
LEGACY_INTROSPECTION_OUTPUT = Path("docs/unraid-api-introspection.json")
INTROSPECTION_QUERY = """
query FullIntrospection {
__schema {
queryType { name }
mutationType { name }
subscriptionType { name }
directives {
name
description
locations
args {
name
description
defaultValue
type { ...TypeRef }
}
}
types {
kind
name
description
fields(includeDeprecated: true) {
name
description
isDeprecated
deprecationReason
args {
name
description
defaultValue
type { ...TypeRef }
}
type { ...TypeRef }
}
inputFields {
name
description
defaultValue
type { ...TypeRef }
}
interfaces { kind name }
enumValues(includeDeprecated: true) {
name
description
isDeprecated
deprecationReason
}
possibleTypes { kind name }
}
}
}
fragment TypeRef on __Type {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
}
}
}
}
}
}
}
}
"""
def _clean(text: str | None) -> str:
"""Collapse multiline description text into a single line."""
if not text:
return ""
return " ".join(text.split())
def _type_to_str(type_ref: dict[str, Any] | None) -> str:
"""Render GraphQL nested type refs to SDL-like notation."""
if not type_ref:
return "Unknown"
kind = type_ref.get("kind")
if kind == "NON_NULL":
return f"{_type_to_str(type_ref.get('ofType'))}!"
if kind == "LIST":
return f"[{_type_to_str(type_ref.get('ofType'))}]"
return str(type_ref.get("name") or kind or "Unknown")
def _field_lines(field: dict[str, Any], *, is_input: bool) -> list[str]:
    """Render one field (or input field) as markdown bullet lines.

    Args:
        field: Introspection field dict (name/type/description/args/...).
        is_input: True for input-object fields, which carry no arguments.

    Returns:
        Markdown lines: the field bullet plus nested description, default,
        argument, and deprecation sub-bullets.
    """
    # NOTE(review): sub-bullet indentation inside these f-strings looks
    # collapsed to a single space — confirm against the rendered docs.
    lines: list[str] = []
    lines.append(f"- `{field['name']}`: `{_type_to_str(field.get('type'))}`")
    description = _clean(field.get("description"))
    if description:
        lines.append(f" - {description}")
    # defaultValue is introspection's string rendering; only present on inputs.
    default_value = field.get("defaultValue")
    if default_value is not None:
        lines.append(f" - Default: `{default_value}`")
    if not is_input:
        # Arguments exist only on output fields; sort by name for stable docs.
        args = sorted(field.get("args") or [], key=lambda item: str(item["name"]))
        if args:
            lines.append(" - Arguments:")
            for arg in args:
                arg_line = f" - `{arg['name']}`: `{_type_to_str(arg.get('type'))}`"
                if arg.get("defaultValue") is not None:
                    arg_line += f" (default: `{arg['defaultValue']}`)"
                lines.append(arg_line)
                arg_description = _clean(arg.get("description"))
                if arg_description:
                    lines.append(f" - {arg_description}")
    if field.get("isDeprecated"):
        reason = _clean(field.get("deprecationReason"))
        lines.append(f" - Deprecated: {reason}" if reason else " - Deprecated")
    return lines
def _build_markdown(
schema: dict[str, Any],
*,
include_introspection: bool,
source: str,
generated_at: str,
) -> str:
"""Build full Markdown schema reference."""
all_types = schema.get("types") or []
types = [
item
for item in all_types
if item.get("name") and (include_introspection or not str(item["name"]).startswith("__"))
]
types_by_name = {str(item["name"]): item for item in types}
kind_counts = Counter(str(item.get("kind", "UNKNOWN")) for item in types)
directives = sorted(schema.get("directives") or [], key=lambda item: str(item["name"]))
implements_map: dict[str, list[str]] = defaultdict(list)
for item in types:
for interface in item.get("interfaces") or []:
interface_name = interface.get("name")
if interface_name:
implements_map[str(interface_name)].append(str(item["name"]))
query_root = (schema.get("queryType") or {}).get("name")
mutation_root = (schema.get("mutationType") or {}).get("name")
subscription_root = (schema.get("subscriptionType") or {}).get("name")
lines: list[str] = []
lines.append("# Unraid GraphQL API Complete Schema Reference")
lines.append("")
lines.append(f"> Generated from live GraphQL introspection on {generated_at}")
lines.append(f"> Source: {source}")
lines.append("")
lines.append("This is permission-scoped: it contains everything visible to the API key used.")
lines.append("")
lines.append("## Table of Contents")
lines.append("- [Schema Summary](#schema-summary)")
lines.append("- [Root Operations](#root-operations)")
lines.append("- [Directives](#directives)")
lines.append("- [All Types (Alphabetical)](#all-types-alphabetical)")
lines.append("")
lines.append("## Schema Summary")
lines.append(f"- Query root: `{query_root}`")
lines.append(f"- Mutation root: `{mutation_root}`")
lines.append(f"- Subscription root: `{subscription_root}`")
lines.append(f"- Total types: **{len(types)}**")
lines.append(f"- Total directives: **{len(directives)}**")
lines.append("- Type kinds:")
lines.extend(f"- `{kind}`: {kind_counts[kind]}" for kind in sorted(kind_counts))
lines.append("")
def render_root(root_name: str | None, label: str) -> None:
lines.append(f"### {label}")
if not root_name or root_name not in types_by_name:
lines.append("Not exposed.")
lines.append("")
return
root_type = types_by_name[root_name]
fields = sorted(root_type.get("fields") or [], key=lambda item: str(item["name"]))
lines.append(f"Total fields: **{len(fields)}**")
lines.append("")
for field in fields:
args = sorted(field.get("args") or [], key=lambda item: str(item["name"]))
arg_signature: list[str] = []
for arg in args:
part = f"{arg['name']}: {_type_to_str(arg.get('type'))}"
if arg.get("defaultValue") is not None:
part += f" = {arg['defaultValue']}"
arg_signature.append(part)
signature = (
f"{field['name']}({', '.join(arg_signature)})"
if arg_signature
else f"{field['name']}()"
)
lines.append(f"- `{signature}: {_type_to_str(field.get('type'))}`")
description = _clean(field.get("description"))
if description:
lines.append(f" - {description}")
if field.get("isDeprecated"):
reason = _clean(field.get("deprecationReason"))
lines.append(f" - Deprecated: {reason}" if reason else " - Deprecated")
lines.append("")
lines.append("## Root Operations")
render_root(query_root, "Queries")
render_root(mutation_root, "Mutations")
render_root(subscription_root, "Subscriptions")
lines.append("## Directives")
if not directives:
lines.append("No directives exposed.")
lines.append("")
else:
for directive in directives:
lines.append(f"### `@{directive['name']}`")
description = _clean(directive.get("description"))
if description:
lines.append(description)
lines.append("")
locations = directive.get("locations") or []
lines.append(
f"- Locations: {', '.join(f'`{item}`' for item in locations) if locations else 'None'}"
)
args = sorted(directive.get("args") or [], key=lambda item: str(item["name"]))
if args:
lines.append("- Arguments:")
for arg in args:
line = f" - `{arg['name']}`: `{_type_to_str(arg.get('type'))}`"
if arg.get("defaultValue") is not None:
line += f" (default: `{arg['defaultValue']}`)"
lines.append(line)
arg_description = _clean(arg.get("description"))
if arg_description:
lines.append(f" - {arg_description}")
lines.append("")
lines.append("## All Types (Alphabetical)")
for item in sorted(types, key=lambda row: str(row["name"])):
name = str(item["name"])
kind = str(item["kind"])
lines.append(f"### `{name}` ({kind})")
description = _clean(item.get("description"))
if description:
lines.append(description)
lines.append("")
if kind == "OBJECT":
interfaces = sorted(
str(interface["name"])
for interface in (item.get("interfaces") or [])
if interface.get("name")
)
if interfaces:
lines.append(f"- Implements: {', '.join(f'`{value}`' for value in interfaces)}")
fields = sorted(item.get("fields") or [], key=lambda row: str(row["name"]))
lines.append(f"- Fields ({len(fields)}):")
if fields:
for field in fields:
lines.extend(_field_lines(field, is_input=False))
else:
lines.append("- None")
elif kind == "INPUT_OBJECT":
fields = sorted(item.get("inputFields") or [], key=lambda row: str(row["name"]))
lines.append(f"- Input fields ({len(fields)}):")
if fields:
for field in fields:
lines.extend(_field_lines(field, is_input=True))
else:
lines.append("- None")
elif kind == "ENUM":
enum_values = sorted(item.get("enumValues") or [], key=lambda row: str(row["name"]))
lines.append(f"- Enum values ({len(enum_values)}):")
if enum_values:
for enum_value in enum_values:
lines.append(f" - `{enum_value['name']}`")
enum_description = _clean(enum_value.get("description"))
if enum_description:
lines.append(f" - {enum_description}")
if enum_value.get("isDeprecated"):
reason = _clean(enum_value.get("deprecationReason"))
lines.append(
f" - Deprecated: {reason}" if reason else " - Deprecated"
)
else:
lines.append("- None")
elif kind == "INTERFACE":
fields = sorted(item.get("fields") or [], key=lambda row: str(row["name"]))
lines.append(f"- Interface fields ({len(fields)}):")
if fields:
for field in fields:
lines.extend(_field_lines(field, is_input=False))
else:
lines.append("- None")
implementers = sorted(implements_map.get(name, []))
if implementers:
lines.append(
f"- Implemented by ({len(implementers)}): "
+ ", ".join(f"`{value}`" for value in implementers)
)
else:
lines.append("- Implemented by (0): None")
elif kind == "UNION":
possible_types = sorted(
str(possible["name"])
for possible in (item.get("possibleTypes") or [])
if possible.get("name")
)
if possible_types:
lines.append(
f"- Possible types ({len(possible_types)}): "
+ ", ".join(f"`{value}`" for value in possible_types)
)
else:
lines.append("- Possible types (0): None")
elif kind == "SCALAR":
lines.append("- Scalar type")
else:
lines.append("- Unhandled type kind")
lines.append("")
return "\n".join(lines).rstrip() + "\n"
def _visible_types(
schema: dict[str, Any], *, include_introspection: bool = False
) -> list[dict[str, Any]]:
"""Return visible types from the schema."""
types = schema.get("types") or []
return [
item
for item in types
if item.get("name") and (include_introspection or not str(item["name"]).startswith("__"))
]
def _types_by_name(
    schema: dict[str, Any], *, include_introspection: bool = False
) -> dict[str, dict[str, Any]]:
    """Index the schema's visible types by their stringified name."""
    mapping: dict[str, dict[str, Any]] = {}
    for entry in _visible_types(schema, include_introspection=include_introspection):
        mapping[str(entry["name"])] = entry
    return mapping
def _field_signature(field: dict[str, Any]) -> str:
    """Render a stable ``name(args): Type`` signature used for change detection.

    Arguments are sorted by name so the signature is order-independent, and
    default values are included so default changes register as diffs.
    """
    parts: list[str] = []
    for arg in sorted(field.get("args") or [], key=lambda item: str(item["name"])):
        rendered = f"{arg['name']}: {_type_to_str(arg.get('type'))}"
        default = arg.get("defaultValue")
        if default is not None:
            rendered += f" = {default}"
        parts.append(rendered)
    # ", ".join on an empty list yields "", so argless fields render as "()".
    return f"{field['name']}({', '.join(parts)}): {_type_to_str(field.get('type'))}"
def _input_field_signature(field: dict[str, Any]) -> str:
    """Render a stable ``name: Type [= default]`` input-field signature."""
    base = f"{field['name']}: {_type_to_str(field.get('type'))}"
    default = field.get("defaultValue")
    return base if default is None else f"{base} = {default}"
def _enum_value_signature(enum_value: dict[str, Any]) -> str:
    """Render an enum value name, tagging deprecated values for diffing."""
    name = str(enum_value["name"])
    if not enum_value.get("isDeprecated"):
        return name
    reason = _clean(enum_value.get("deprecationReason"))
    return f"{name} [deprecated: {reason}]" if reason else f"{name} [deprecated]"
def _root_field_names(schema: dict[str, Any], root_key: str) -> set[str]:
    """Collect field names for a root type.

    ``root_key`` is one of ``"queryType"``/``"mutationType"``/
    ``"subscriptionType"``; an empty set is returned when that root is not
    exposed or its type cannot be found.
    """
    root_name = (schema.get(root_key) or {}).get("name")
    if not root_name:
        return set()
    root = _types_by_name(schema).get(str(root_name))
    if root is None:
        return set()
    return {str(field["name"]) for field in (root.get("fields") or [])}
def _type_member_signatures(type_info: dict[str, Any]) -> set[str]:
    """Return the set of stable member signatures for change detection.

    Kinds without members (SCALAR and anything unrecognised) yield an
    empty set.
    """
    kind = str(type_info.get("kind", "UNKNOWN"))
    if kind == "OBJECT" or kind == "INTERFACE":
        return {_field_signature(field) for field in (type_info.get("fields") or [])}
    if kind == "INPUT_OBJECT":
        return {
            _input_field_signature(field)
            for field in (type_info.get("inputFields") or [])
        }
    if kind == "ENUM":
        return {
            _enum_value_signature(value)
            for value in (type_info.get("enumValues") or [])
        }
    if kind == "UNION":
        names = (possible.get("name") for possible in (type_info.get("possibleTypes") or []))
        return {str(name) for name in names if name}
    return set()
def _build_summary_markdown(
    schema: dict[str, Any], *, source: str, generated_at: str, include_introspection: bool
) -> str:
    """Build condensed root-level summary markdown.

    Renders one markdown table per root operation type (query / mutation /
    subscription) plus per-kind type counts — deliberately terse compared to
    the full per-type reference built elsewhere in this module.
    """
    types = _types_by_name(schema, include_introspection=include_introspection)
    visible_types = _visible_types(schema, include_introspection=include_introspection)
    # Sort directives by name so the output is stable across runs.
    directives = sorted(schema.get("directives") or [], key=lambda item: str(item["name"]))
    kind_counts = Counter(str(item.get("kind", "UNKNOWN")) for item in visible_types)
    # Root type names may be None when the server does not expose that root.
    query_root = (schema.get("queryType") or {}).get("name")
    mutation_root = (schema.get("mutationType") or {}).get("name")
    subscription_root = (schema.get("subscriptionType") or {}).get("name")
    lines = [
        "# Unraid API Introspection Summary",
        "",
        f"> Auto-generated from live API introspection on {generated_at}",
        f"> Source: {source}",
        "",
        "## Table of Contents",
        "",
        "- [Schema Summary](#schema-summary)",
        "- [Query Fields](#query-fields)",
        "- [Mutation Fields](#mutation-fields)",
        "- [Subscription Fields](#subscription-fields)",
        "- [Type Kinds](#type-kinds)",
        "",
        "## Schema Summary",
        f"- Query root: `{query_root}`",
        f"- Mutation root: `{mutation_root}`",
        f"- Subscription root: `{subscription_root}`",
        f"- Total types: **{len(visible_types)}**",
        f"- Total directives: **{len(directives)}**",
        "",
    ]

    def render_table(section_title: str, root_name: str | None) -> None:
        # Closure appends one field table to `lines`; an unknown/absent root
        # still emits the header and an empty table.
        lines.append(f"## {section_title}")
        lines.append("")
        lines.append("| Field | Return Type | Arguments |")
        lines.append("|-------|-------------|-----------|")
        root = types.get(str(root_name)) if root_name else None
        for field in (
            sorted(root.get("fields") or [], key=lambda item: str(item["name"])) if root else []
        ):
            args = sorted(field.get("args") or [], key=lambda item: str(item["name"]))
            arg_text = (
                ""
                if not args
                else ", ".join(
                    (
                        f"{arg['name']}: {_type_to_str(arg.get('type'))}"
                        + (
                            f" (default: {arg['defaultValue']})"
                            if arg.get("defaultValue") is not None
                            else ""
                        )
                    )
                    for arg in args
                )
            )
            lines.append(
                f"| `{field['name']}` | `{_type_to_str(field.get('type'))}` | {arg_text} |"
            )
        lines.append("")

    render_table("Query Fields", query_root)
    render_table("Mutation Fields", mutation_root)
    render_table("Subscription Fields", subscription_root)
    lines.append("## Type Kinds")
    lines.append("")
    for kind in sorted(kind_counts):
        lines.append(f"- `{kind}`: {kind_counts[kind]}")  # noqa: PERF401
    lines.extend(
        [
            "",
            "## Notes",
            "",
            "- This summary is intentionally condensed; the full schema reference lives in `UNRAID-API-COMPLETE-REFERENCE.md`.",
            "- Raw schema exports live in `UNRAID-API-INTROSPECTION.json` and `UNRAID-SCHEMA.graphql`.",
            "",
        ]
    )
    return "\n".join(lines)
def _build_changes_markdown(
    previous_schema: dict[str, Any] | None,
    current_schema: dict[str, Any],
    *,
    source: str,
    generated_at: str,
    include_introspection: bool,
) -> str:
    """Build a schema change report from a previous introspection snapshot.

    Sections, in order: root-field additions/removals, per-kind type
    additions/removals, then member-level signature diffs for types present
    in both snapshots. When ``previous_schema`` is None a short
    "no diff available" report is returned instead.
    """
    lines = [
        "# Unraid API Schema Changes",
        "",
        f"> Generated on {generated_at}",
        f"> Source: {source}",
        "",
    ]
    # No baseline: emit an explanatory stub instead of an empty diff.
    if previous_schema is None:
        lines.extend(
            [
                "No previous introspection snapshot was available, so no diff could be computed.",
                "",
                "The current canonical artifacts were regenerated successfully.",
                "",
            ]
        )
        return "\n".join(lines)
    current_types = _types_by_name(current_schema, include_introspection=include_introspection)
    previous_types = _types_by_name(previous_schema, include_introspection=include_introspection)
    # (label, old field-name set, new field-name set) per root operation type.
    sections = [
        (
            "Query fields",
            _root_field_names(previous_schema, "queryType"),
            _root_field_names(current_schema, "queryType"),
        ),
        (
            "Mutation fields",
            _root_field_names(previous_schema, "mutationType"),
            _root_field_names(current_schema, "mutationType"),
        ),
        (
            "Subscription fields",
            _root_field_names(previous_schema, "subscriptionType"),
            _root_field_names(current_schema, "subscriptionType"),
        ),
    ]
    all_kinds = {"OBJECT", "INPUT_OBJECT", "ENUM", "INTERFACE", "UNION", "SCALAR"}
    # Bucket type names by kind for each snapshot so additions/removals can
    # be reported per kind below.
    previous_by_kind = {
        kind: {name for name, info in previous_types.items() if str(info.get("kind")) == kind}
        for kind in all_kinds
    }
    current_by_kind = {
        kind: {name for name, info in current_types.items() if str(info.get("kind")) == kind}
        for kind in all_kinds
    }
    for label, old_set, new_set in sections:
        added = sorted(new_set - old_set)
        removed = sorted(old_set - new_set)
        lines.append(f"## {label}")
        lines.append("")
        lines.append(f"- Added: {len(added)}")
        if added:
            lines.extend(f" - `{name}`" for name in added)
        lines.append(f"- Removed: {len(removed)}")
        if removed:
            lines.extend(f" - `{name}`" for name in removed)
        if not added and not removed:
            lines.append("- No changes")
        lines.append("")
    lines.append("## Type Changes")
    lines.append("")
    for kind in sorted(all_kinds):
        added = sorted(current_by_kind[kind] - previous_by_kind[kind])
        removed = sorted(previous_by_kind[kind] - current_by_kind[kind])
        # Kinds with no churn are omitted entirely.
        if not added and not removed:
            continue
        lines.append(f"### {kind}")
        lines.append("")
        lines.append(f"- Added: {len(added)}")
        if added:
            lines.extend(f" - `{name}`" for name in added)
        lines.append(f"- Removed: {len(removed)}")
        if removed:
            lines.extend(f" - `{name}`" for name in removed)
        lines.append("")
    # Types present in both snapshots whose kind or member signatures changed.
    changed_types: list[str] = []
    for name in sorted(set(previous_types) & set(current_types)):
        previous_info = previous_types[name]
        current_info = current_types[name]
        if str(previous_info.get("kind")) != str(current_info.get("kind")):
            changed_types.append(name)
            continue
        if _type_member_signatures(previous_info) != _type_member_signatures(current_info):
            changed_types.append(name)
    lines.append("## Type Signature Changes")
    lines.append("")
    if not changed_types:
        lines.append("No existing type signatures changed.")
        lines.append("")
        return "\n".join(lines)
    for name in changed_types:
        previous_info = previous_types[name]
        current_info = current_types[name]
        previous_members = _type_member_signatures(previous_info)
        current_members = _type_member_signatures(current_info)
        added = sorted(current_members - previous_members)
        removed = sorted(previous_members - current_members)
        lines.append(f"### `{name}` ({current_info.get('kind')})")
        lines.append("")
        lines.append(f"- Added members: {len(added)}")
        if added:
            lines.extend(f" - `{member}`" for member in added)
        lines.append(f"- Removed members: {len(removed)}")
        if removed:
            lines.extend(f" - `{member}`" for member in removed)
        # A type can land here with identical members when only its kind
        # changed; make that explicit in the report.
        if not added and not removed and previous_info.get("kind") != current_info.get("kind"):
            lines.append(
                f"- Kind changed: `{previous_info.get('kind')}` -> `{current_info.get('kind')}`"
            )
        lines.append("")
    return "\n".join(lines)
def _extract_schema(payload: dict[str, Any]) -> dict[str, Any]:
"""Return the __schema payload or raise."""
schema = (payload.get("data") or {}).get("__schema")
if not schema:
raise SystemExit("GraphQL introspection returned no __schema payload.")
return schema
def _load_previous_schema(path: Path) -> dict[str, Any] | None:
    """Read a prior introspection snapshot, or None when ``path`` is absent."""
    if not path.exists():
        return None
    raw = path.read_text(encoding="utf-8")
    return _extract_schema(json.loads(raw))
def _write_schema_graphql(path: Path, payload: dict[str, Any]) -> None:
    """Write the schema as SDL, prefixed with a do-not-edit banner."""
    banner_lines = [
        "# ------------------------------------------------------",
        "# THIS FILE WAS AUTOMATICALLY GENERATED (DO NOT MODIFY)",
        "# ------------------------------------------------------",
    ]
    sdl = print_schema(build_client_schema(payload["data"]))
    content = "\n".join(banner_lines) + "\n\n" + sdl.rstrip() + "\n"
    path.write_text(content, encoding="utf-8")
def _parse_args() -> argparse.Namespace:
    """Parse CLI args.

    Endpoint and API key default to the UNRAID_API_URL / UNRAID_API_KEY
    environment variables; output paths default to module-level constants.
    """
    parser = argparse.ArgumentParser(
        description="Generate canonical Unraid GraphQL docs from introspection."
    )
    parser.add_argument(
        "--api-url",
        default=os.getenv("UNRAID_API_URL", ""),
        help="GraphQL endpoint URL (default: UNRAID_API_URL env var).",
    )
    parser.add_argument(
        "--api-key",
        default=os.getenv("UNRAID_API_KEY", ""),
        help="API key (default: UNRAID_API_KEY env var).",
    )
    parser.add_argument(
        "--complete-output",
        type=Path,
        default=DEFAULT_COMPLETE_OUTPUT,
        help=f"Full reference output path (default: {DEFAULT_COMPLETE_OUTPUT}).",
    )
    parser.add_argument(
        "--summary-output",
        type=Path,
        default=DEFAULT_SUMMARY_OUTPUT,
        help=f"Summary output path (default: {DEFAULT_SUMMARY_OUTPUT}).",
    )
    parser.add_argument(
        "--introspection-output",
        type=Path,
        default=DEFAULT_INTROSPECTION_OUTPUT,
        help=f"Introspection JSON output path (default: {DEFAULT_INTROSPECTION_OUTPUT}).",
    )
    parser.add_argument(
        "--schema-output",
        type=Path,
        default=DEFAULT_SCHEMA_OUTPUT,
        help=f"SDL schema output path (default: {DEFAULT_SCHEMA_OUTPUT}).",
    )
    parser.add_argument(
        "--changes-output",
        type=Path,
        default=DEFAULT_CHANGES_OUTPUT,
        help=f"Schema changes report path (default: {DEFAULT_CHANGES_OUTPUT}).",
    )
    # Default None: the fallback chain is resolved in main(), not here.
    parser.add_argument(
        "--previous-introspection",
        type=Path,
        default=None,
        help=(
            "Previous introspection JSON used for diffing. Defaults to the current "
            "introspection output path, falling back to the legacy docs path if present."
        ),
    )
    parser.add_argument(
        "--timeout-seconds",
        type=float,
        default=90.0,
        help="HTTP timeout in seconds (default: 90).",
    )
    # SSL verification is opt-in because typical targets are LAN hosts with
    # self-signed certificates.
    parser.add_argument(
        "--verify-ssl",
        action="store_true",
        help="Enable SSL cert verification. Default is disabled for local/self-signed setups.",
    )
    parser.add_argument(
        "--include-introspection-types",
        action="store_true",
        help="Include __Schema/__Type/etc in the generated type list.",
    )
    return parser.parse_args()
def main() -> int:
    """Run generator CLI.

    Fetches introspection from the configured endpoint, then writes the full
    reference, summary, raw introspection JSON, SDL schema, and change report.

    Returns:
        0 on success.

    Raises:
        SystemExit: on missing configuration or GraphQL-level errors.
    """
    args = _parse_args()
    if not args.api_url:
        raise SystemExit("Missing API URL. Provide --api-url or set UNRAID_API_URL.")
    if not args.api_key:
        raise SystemExit("Missing API key. Provide --api-key or set UNRAID_API_KEY.")
    headers = {"x-api-key": args.api_key, "Content-Type": "application/json"}
    with httpx.Client(timeout=args.timeout_seconds, verify=args.verify_ssl) as client:
        response = client.post(args.api_url, json={"query": INTROSPECTION_QUERY}, headers=headers)
        response.raise_for_status()
        payload = response.json()
    # GraphQL transports errors in-band: a 200 response can still carry them.
    if payload.get("errors"):
        errors = json.dumps(payload["errors"], indent=2)
        raise SystemExit(f"GraphQL introspection returned errors:\n{errors}")
    schema = _extract_schema(payload)
    generated_at = dt.datetime.now(dt.UTC).replace(microsecond=0).isoformat()
    # Diff baseline: explicit flag, else the current output file (read before
    # it is overwritten below), else the legacy docs location.
    previous_path = args.previous_introspection or (
        args.introspection_output
        if args.introspection_output.exists()
        else LEGACY_INTROSPECTION_OUTPUT
    )
    previous_schema = _load_previous_schema(previous_path)
    # A set dedupes outputs that share a parent directory.
    for path in {
        args.complete_output,
        args.summary_output,
        args.introspection_output,
        args.schema_output,
        args.changes_output,
    }:
        path.parent.mkdir(parents=True, exist_ok=True)
    full_reference = _build_markdown(
        schema,
        include_introspection=bool(args.include_introspection_types),
        source=args.api_url,
        generated_at=generated_at,
    )
    summary = _build_summary_markdown(
        schema,
        source=args.api_url,
        generated_at=generated_at,
        include_introspection=bool(args.include_introspection_types),
    )
    changes = _build_changes_markdown(
        previous_schema,
        schema,
        source=args.api_url,
        generated_at=generated_at,
        include_introspection=bool(args.include_introspection_types),
    )
    args.complete_output.write_text(full_reference, encoding="utf-8")
    args.summary_output.write_text(summary, encoding="utf-8")
    args.introspection_output.write_text(
        json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8"
    )
    _write_schema_graphql(args.schema_output, payload)
    args.changes_output.write_text(changes, encoding="utf-8")
    print(f"Wrote {args.complete_output}")
    print(f"Wrote {args.summary_output}")
    print(f"Wrote {args.introspection_output}")
    print(f"Wrote {args.schema_output}")
    print(f"Wrote {args.changes_output}")
    return 0
if __name__ == "__main__":
    # Propagate main()'s integer status as the process exit code.
    raise SystemExit(main())

99
bin/validate-marketplace.sh Executable file
View file

@ -0,0 +1,99 @@
#!/usr/bin/env bash
# Validate Claude Code marketplace and plugin structure.
#
# Deliberately NOT `set -e`: individual check failures are tallied and
# reported at the end rather than aborting the run.
set -uo pipefail

# ANSI color escapes used for pass/fail markers.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly NC='\033[0m' # No Color

# Running tallies, updated by check().
CHECKS=0
PASSED=0
FAILED=0
#######################################
# Run one named validation and record the outcome.
# Globals:   CHECKS, PASSED, FAILED (incremented); RED, GREEN, NC (read)
# Arguments: $1 - human-readable check name
#            $2 - shell command string, eval'd with output discarded
# Outputs:   one "Checking: <name>... <mark>" line to stdout
# Returns:   0 if the command succeeded, 1 otherwise
#######################################
check() {
  local test_name="$1"
  local test_cmd="$2"
  CHECKS=$((CHECKS + 1))
  echo -n "Checking: $test_name... "
  # eval is intentional: callers pass quoted command strings (incl. jq filters).
  if ! eval "$test_cmd" > /dev/null 2>&1; then
    echo -e "${RED}${NC}"
    FAILED=$((FAILED + 1))
    return 1
  fi
  echo -e "${GREEN}${NC}"
  PASSED=$((PASSED + 1))
  return 0
}
echo "=== Validating Claude Code Marketplace Structure ==="
echo ""

# Check marketplace manifest
check "Marketplace manifest exists" "test -f .claude-plugin/marketplace.json"
check "Marketplace manifest is valid JSON" "jq empty .claude-plugin/marketplace.json"
check "Marketplace has name" "jq -e '.name' .claude-plugin/marketplace.json"
check "Marketplace has plugins array" "jq -e '.plugins | type == \"array\"' .claude-plugin/marketplace.json"

# Check plugin manifest
check "Plugin manifest exists" "test -f .claude-plugin/plugin.json"
check "Plugin manifest is valid JSON" "jq empty .claude-plugin/plugin.json"
check "Plugin has name" "jq -e '.name' .claude-plugin/plugin.json"
check "Plugin has version" "jq -e '.version' .claude-plugin/plugin.json"

# Check plugin structure
check "Plugin has SKILL.md" "test -f skills/unraid/SKILL.md"
check "Plugin has README.md" "test -f skills/unraid/README.md"
check "Plugin has scripts directory" "test -d skills/unraid/scripts"
check "Plugin has examples directory" "test -d skills/unraid/examples"
check "Plugin has references directory" "test -d skills/unraid/references"

# Validate plugin is listed in marketplace
check "Plugin listed in marketplace" "jq -e '.plugins[] | select(.name == \"unraid\")' .claude-plugin/marketplace.json"

# Check marketplace metadata
check "Marketplace has repository" "jq -e '.repository' .claude-plugin/marketplace.json"
check "Marketplace has owner" "jq -e '.owner' .claude-plugin/marketplace.json"

# Verify source path (empty when the plugin entry or its .source is absent)
PLUGIN_SOURCE=$(jq -r '.plugins[]? | select(.name == "unraid") | .source // empty' .claude-plugin/marketplace.json 2>/dev/null || true)
if [ -n "$PLUGIN_SOURCE" ]; then
  check "Plugin source path is valid" "test -d \"$PLUGIN_SOURCE\""
else
  CHECKS=$((CHECKS + 1))
  FAILED=$((FAILED + 1))
  echo -e "Checking: Plugin source path is valid... ${RED}${NC} (plugin not found in marketplace)"
fi

# Check version sync between pyproject.toml and plugin.json
echo "Checking version sync..."
TOML_VER=$(grep -m1 '^version = ' pyproject.toml | sed 's/version = "//;s/"//')
# Read via jq (already a hard dependency of every check above) instead of
# python3, so the validator needs one fewer interpreter on PATH. The //
# alternative catches a missing .version key; the || catches unreadable or
# invalid JSON.
PLUGIN_VER=$(jq -r '.version // "ERROR_READING"' .claude-plugin/plugin.json 2>/dev/null || echo "ERROR_READING")
if [ "$TOML_VER" != "$PLUGIN_VER" ]; then
  echo -e "${RED}FAIL: Version mismatch — pyproject.toml=$TOML_VER, plugin.json=$PLUGIN_VER${NC}"
  CHECKS=$((CHECKS + 1))
  FAILED=$((FAILED + 1))
else
  echo -e "${GREEN}PASS: Versions in sync ($TOML_VER)${NC}"
  CHECKS=$((CHECKS + 1))
  PASSED=$((PASSED + 1))
fi

echo ""
echo "=== Results ==="
echo -e "Total checks: $CHECKS"
echo -e "${GREEN}Passed: $PASSED${NC}"
if [ "$FAILED" -gt 0 ]; then
  echo -e "${RED}Failed: $FAILED${NC}"
  exit 1
else
  echo -e "${GREEN}All checks passed!${NC}"
  echo ""
  echo "Marketplace is ready for distribution at:"
  echo "  $(jq -r '.repository' .claude-plugin/marketplace.json)"
fi

View file

@ -1,6 +1,6 @@
{
"name": "unraid-mcp",
"version": "1.3.4",
"version": "1.3.6",
"description": "Query, monitor, and manage Unraid servers via GraphQL API through MCP tools. Supports system info, Docker, VMs, array/parity, notifications, plugins, and live telemetry.",
"mcpServers": {
"unraid-mcp": {

View file

@ -10,7 +10,7 @@ build-backend = "hatchling.build"
# ============================================================================
[project]
name = "unraid-mcp"
version = "1.3.4"
version = "1.3.6"
description = "MCP Server for Unraid API - provides tools to interact with an Unraid server's GraphQL API"
readme = "README.md"
license = {file = "LICENSE"}

View file

@ -0,0 +1,112 @@
#!/usr/bin/env bats
# Tests for bin/bump-version.sh
#
# CLAUDE_PLUGIN_ROOT is used as the repo root override (same variable the
# plugin runtime exports to hook processes), so tests just point it at a
# temp copy of the relevant files.

# Absolute path to the script under test, and the real repo root (one level
# up from this tests/ directory).
SCRIPT="$(cd "$(dirname "$BATS_TEST_FILENAME")/.." && pwd)/bin/bump-version.sh"
REAL_ROOT="$(cd "$(dirname "$BATS_TEST_FILENAME")/.." && pwd)"
# Per-test fixture: temp tree with pinned versions, selected via env var.
setup() {
  # NOTE(review): TMPDIR is also the conventional temp-root variable honored
  # by mktemp and many tools; if it is exported in the caller's environment,
  # this assignment shadows it for child processes — confirm, or prefer a
  # private name like BUMP_TEST_DIR.
  TMPDIR="$(mktemp -d)"
  # Copy only the version-bearing files into the temp tree
  mkdir -p "${TMPDIR}/.claude-plugin" "${TMPDIR}/.codex-plugin"
  cp "${REAL_ROOT}/.claude-plugin/plugin.json" "${TMPDIR}/.claude-plugin/"
  cp "${REAL_ROOT}/.codex-plugin/plugin.json" "${TMPDIR}/.codex-plugin/"
  cp "${REAL_ROOT}/gemini-extension.json" "${TMPDIR}/"
  cp "${REAL_ROOT}/pyproject.toml" "${TMPDIR}/"
  # Pin all files to a known starting version
  # NOTE(review): `sed -i` without a suffix is GNU-only; BSD/macOS sed needs
  # `sed -i ''` — confirm the intended test platforms.
  sed -i 's/"version": "[^"]*"/"version": "9.8.7"/g' \
    "${TMPDIR}/.claude-plugin/plugin.json" \
    "${TMPDIR}/.codex-plugin/plugin.json" \
    "${TMPDIR}/gemini-extension.json"
  sed -i 's/^version = ".*"/version = "9.8.7"/' "${TMPDIR}/pyproject.toml"
  # Point the script at the temp tree via the standard plugin env var
  export CLAUDE_PLUGIN_ROOT="${TMPDIR}"
}
# Per-test cleanup: drop the env override and remove the temp tree.
teardown() {
  unset CLAUDE_PLUGIN_ROOT
  # ${TMPDIR:?} aborts instead of running `rm -rf` on an empty path if
  # setup() ever failed before assigning it; `--` guards odd leading dashes.
  rm -rf -- "${TMPDIR:?}"
}
# Extract the first x.y.z version number from a JSON or TOML file:
# matches either a `"version"` JSON key or a TOML `version = ` line.
version_in() {
  local file="$1"
  grep -m1 -e '"version"' -e '^version' -- "$file" | grep -o '[0-9]\+\.[0-9]\+\.[0-9]\+'
}
@test "no argument prints usage and exits non-zero" {
  # Usage errors must be signalled via exit status, not just text.
  run bash "${SCRIPT}"
  [ "$status" -ne 0 ]
  [[ "$output" == *"Usage"* ]]
}

@test "explicit version updates all four files" {
  run bash "${SCRIPT}" 1.2.3
  [ "$status" -eq 0 ]
  # Every version-bearing file must land on exactly the requested version.
  [ "$(version_in "${TMPDIR}/.claude-plugin/plugin.json")" = "1.2.3" ]
  [ "$(version_in "${TMPDIR}/.codex-plugin/plugin.json")" = "1.2.3" ]
  [ "$(version_in "${TMPDIR}/gemini-extension.json")" = "1.2.3" ]
  [ "$(version_in "${TMPDIR}/pyproject.toml")" = "1.2.3" ]
}
# Keyword bumps start from the 9.8.7 baseline pinned in setup().
@test "patch keyword increments patch component" {
  run bash "${SCRIPT}" patch
  [ "$status" -eq 0 ]
  [ "$(version_in "${TMPDIR}/.claude-plugin/plugin.json")" = "9.8.8" ]
  [ "$(version_in "${TMPDIR}/pyproject.toml")" = "9.8.8" ]
}

@test "minor keyword increments minor and resets patch to zero" {
  run bash "${SCRIPT}" minor
  [ "$status" -eq 0 ]
  [ "$(version_in "${TMPDIR}/.claude-plugin/plugin.json")" = "9.9.0" ]
  [ "$(version_in "${TMPDIR}/pyproject.toml")" = "9.9.0" ]
}

@test "major keyword increments major and resets minor and patch" {
  run bash "${SCRIPT}" major
  [ "$status" -eq 0 ]
  [ "$(version_in "${TMPDIR}/.claude-plugin/plugin.json")" = "10.0.0" ]
  [ "$(version_in "${TMPDIR}/pyproject.toml")" = "10.0.0" ]
}

@test "all four files are updated in sync" {
  run bash "${SCRIPT}" 2.0.0
  [ "$status" -eq 0 ]
  local claude codex gemini pyproject
  claude="$(version_in "${TMPDIR}/.claude-plugin/plugin.json")"
  codex="$(version_in "${TMPDIR}/.codex-plugin/plugin.json")"
  gemini="$(version_in "${TMPDIR}/gemini-extension.json")"
  pyproject="$(version_in "${TMPDIR}/pyproject.toml")"
  [ "$claude" = "2.0.0" ]
  [ "$codex" = "2.0.0" ]
  [ "$gemini" = "2.0.0" ]
  [ "$pyproject" = "2.0.0" ]
}
@test "output reports old and new version" {
  run bash "${SCRIPT}" 1.0.0
  [ "$status" -eq 0 ]
  # Both the pinned starting version and the target must appear in output.
  [[ "$output" == *"9.8.7"* ]]
  [[ "$output" == *"1.0.0"* ]]
}

@test "output reminds about CHANGELOG" {
  run bash "${SCRIPT}" 1.0.0
  [ "$status" -eq 0 ]
  [[ "$output" == *"CHANGELOG"* ]]
}

@test "falls back to dirname resolution when CLAUDE_PLUGIN_ROOT is unset" {
  unset CLAUDE_PLUGIN_ROOT
  # Running against the real repo root — just verify it exits 0 with patch
  # and produces the expected output format without corrupting anything
  # NOTE(review): this test rewrites files in the real working tree (a
  # same-version no-op, but still a mutation) — confirm that is acceptable
  # when run in a dirty checkout.
  CURRENT="$(grep -m1 '"version"' "${REAL_ROOT}/.claude-plugin/plugin.json" | grep -o '[0-9]\+\.[0-9]\+\.[0-9]\+')"
  run bash "${SCRIPT}" "${CURRENT}" # bump to same version = no-op change
  [ "$status" -eq 0 ]
  [[ "$output" == *"${CURRENT}"* ]]
}