chore: sync working tree updates

This commit is contained in:
Jacob Magar 2026-04-15 19:13:31 -04:00
parent f4adb4e8b9
commit 454f346036
40 changed files with 286 additions and 2918 deletions

View file

@ -1,7 +1,7 @@
{
"name": "unraid-mcp",
"displayName": "Unraid MCP",
"version": "1.3.6",
"version": "1.3.8",
"description": "Query, monitor, and manage Unraid servers via GraphQL API through MCP tools. Supports system info, Docker, VMs, array/parity, notifications, plugins, rclone, and live telemetry.",
"author": {
"name": "Jacob Magar",
@ -55,7 +55,7 @@
"unraid_api_key": {
"type": "string",
"title": "Unraid API Key",
"description": "API key for your Unraid server. Found in Settings \u2192 Management Access \u2192 API Keys.",
"description": "API key for your Unraid server. Found in Settings → Management Access → API Keys.",
"sensitive": true
}
}

View file

@ -1,6 +1,6 @@
{
"name": "unraid-mcp",
"version": "1.3.6",
"version": "1.3.8",
"description": "Unraid server management via MCP.",
"homepage": "https://github.com/jmagar/unraid-mcp",
"repository": "https://github.com/jmagar/unraid-mcp",

View file

@ -43,7 +43,7 @@ coverage
target
Justfile
biome.json
.pre-commit-config.yaml
lefthook.yml
.prettierrc
.prettierignore
tests

View file

@ -102,19 +102,6 @@ jobs:
- name: Dependency audit
run: uvx pip-audit
docker-security:
name: Docker Security
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Check Dockerfile security
run: bash bin/check-docker-security.sh Dockerfile
- name: Check no baked env vars
run: bash bin/check-no-baked-env.sh .
- name: Check ignore files
run: bash bin/ensure-ignore-files.sh --check .
gitleaks:
name: Secret Scan
runs-on: ubuntu-latest

2
.gitignore vendored
View file

@ -25,7 +25,7 @@
.full-review/
.full-review-archive-*
.bivvy
scaffold-plans/
# ── IDE / editor ─────────────────────────────────────────────────────────────
.vscode/
.cursor/

View file

@ -1,31 +0,0 @@
repos:
- repo: local
hooks:
- id: skills-validate
name: Validate skills
entry: just validate-skills
language: system
pass_filenames: false
- id: docker-security
name: Docker security check
entry: bash bin/check-docker-security.sh
language: system
pass_filenames: false
- id: no-baked-env
name: No baked env vars
entry: bash bin/check-no-baked-env.sh
language: system
pass_filenames: false
- id: ensure-ignore-files
name: Ensure ignore files
entry: bash bin/ensure-ignore-files.sh --check
language: system
pass_filenames: false
- id: block-env-commits
name: Block .env file commits (allow only .env.example)
entry: bash bin/block-env-commits.sh
language: system
pass_filenames: false
files: '\.env'
exclude: '\.env\.example$'

View file

@ -7,6 +7,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
## [1.3.8] - 2026-04-15
### Changed
- Repository maintenance updates committed from the current working tree.
- Version-bearing manifests synchronized to 1.3.8.
## [1.3.7] - 2026-04-05
### Fixed

26
bin/CLAUDE.md Normal file
View file

@ -0,0 +1,26 @@
# `bin/`
This subtree contains plugin executables that should be added to `PATH` in generated Claude Code plugin repositories.
## Contract
- Put executable entrypoints here, not repo-maintenance scripts
- Keep the files shell-friendly and portable unless a specific runtime is required
- Make names stable and descriptive so they are safe to expose on `PATH`
## Expectations
- Each executable should have a shebang
- Executables should be safe to call without extra wrapper logic
- Commands should prefer deterministic behavior and clear exit codes
- If a script needs inputs, document them near the file that consumes them
## Notes for Claude Code Plugins
This subtree is specifically for plugin surfaces that Claude Code can invoke directly from the shell. Use it for generated plugin utilities such as:
- setup helpers
- validation helpers
- lightweight wrapper commands
- plugin-local tooling that needs to be discoverable on `PATH`

View file

@ -1,145 +0,0 @@
#!/usr/bin/env bash
# check-docker-security.sh — Verify Dockerfile follows plugin security conventions
# Run standalone: bash scripts/check-docker-security.sh [path/to/Dockerfile]
# Run in pre-commit: add as a hook (see .pre-commit-config.yaml example in plugin-setup-guide)
#
# Checks:
# 1. Multi-stage build (separate builder + runtime stages)
# 2. Non-root user (USER 1000:1000 or ${PUID}:${PGID})
# 3. No sensitive ENV directives baked into the image
# 4. HEALTHCHECK present
set -euo pipefail
PASS=0
FAIL=0
WARN=0
pass() { echo " ✓ PASS: $1"; PASS=$((PASS + 1)); }
fail() { echo " ✗ FAIL: $1$2"; FAIL=$((FAIL + 1)); }
warn() { echo " ⚠ WARN: $1$2"; WARN=$((WARN + 1)); }
# Find Dockerfile
DOCKERFILE="${1:-Dockerfile}"
if [[ ! -f "$DOCKERFILE" ]]; then
echo "Error: $DOCKERFILE not found" >&2
exit 1
fi
echo "=== Docker Security Check: $DOCKERFILE ==="
# ── 1. Multi-stage build ─────────────────────────────────────────────────────
FROM_COUNT=$(grep -cE '^FROM\s' "$DOCKERFILE" || true)
if [[ "$FROM_COUNT" -ge 2 ]]; then
pass "Multi-stage build ($FROM_COUNT stages)"
else
fail "Multi-stage build" "Found $FROM_COUNT FROM directive(s) — need at least 2 (builder + runtime)"
fi
# Check for named stages
if grep -qE '^FROM\s.+\sAS\s+builder' "$DOCKERFILE"; then
pass "Named builder stage"
else
warn "Named builder stage" "No 'FROM ... AS builder' found — recommend naming stages"
fi
if grep -qE '^FROM\s.+\sAS\s+runtime' "$DOCKERFILE"; then
pass "Named runtime stage"
else
warn "Named runtime stage" "No 'FROM ... AS runtime' found — recommend naming stages"
fi
# ── 2. Non-root user ─────────────────────────────────────────────────────────
# Check for USER directive
if grep -qE '^USER\s' "$DOCKERFILE"; then
USER_LINE=$(grep -E '^USER\s' "$DOCKERFILE" | tail -1)
USER_VALUE=$(echo "$USER_LINE" | sed 's/^USER\s*//')
# Check for 1000:1000 or variable-based UID:GID
if echo "$USER_VALUE" | grep -qE '^\$?\{?PUID|1000:1000|1000$'; then
pass "Non-root user ($USER_VALUE)"
else
warn "Non-root user" "USER is '$USER_VALUE' — expected 1000:1000 or \${PUID}:\${PGID}"
fi
else
# Check if docker-compose.yaml handles it via user: directive
if [[ -f "docker-compose.yaml" ]] && grep -qE '^\s+user:' docker-compose.yaml; then
warn "Non-root user" "No USER in Dockerfile but docker-compose.yaml sets user: — acceptable if always run via compose"
else
fail "Non-root user" "No USER directive found — container runs as root"
fi
fi
# Check there's no USER root after the runtime stage
RUNTIME_START=$(grep -nE '^FROM\s.+\sAS\s+runtime' "$DOCKERFILE" | head -1 | cut -d: -f1 || true)
if [[ -n "$RUNTIME_START" ]]; then
if tail -n +"$RUNTIME_START" "$DOCKERFILE" | grep -qE '^USER\s+root'; then
fail "No root in runtime" "USER root found after runtime stage — never run as root in production"
else
pass "No root in runtime stage"
fi
fi
# ── 3. No sensitive ENV baked in ──────────────────────────────────────────────
SENSITIVE_PATTERNS='(API_KEY|TOKEN|SECRET|PASSWORD|CREDENTIAL|PRIVATE_KEY|AUTH)'
BAKED_ENVS=$(grep -nE "^ENV\s+.*${SENSITIVE_PATTERNS}" "$DOCKERFILE" || true)
if [[ -n "$BAKED_ENVS" ]]; then
fail "No baked secrets" "Sensitive ENV directives found in Dockerfile:"
echo "$BAKED_ENVS" | while IFS= read -r line; do
echo " $line"
done
else
pass "No baked secrets in ENV directives"
fi
# Check for ARG with defaults that look like secrets
BAKED_ARGS=$(grep -nE "^ARG\s+.*${SENSITIVE_PATTERNS}.*=" "$DOCKERFILE" || true)
if [[ -n "$BAKED_ARGS" ]]; then
warn "No baked ARG secrets" "ARG with sensitive defaults found (may leak via docker history):"
echo "$BAKED_ARGS" | while IFS= read -r line; do
echo " $line"
done
else
pass "No baked secrets in ARG defaults"
fi
# ── 4. HEALTHCHECK ────────────────────────────────────────────────────────────
if grep -qE '^HEALTHCHECK\s' "$DOCKERFILE"; then
pass "HEALTHCHECK directive present"
if grep -qE '/health' "$DOCKERFILE"; then
pass "HEALTHCHECK uses /health endpoint"
else
warn "HEALTHCHECK endpoint" "HEALTHCHECK doesn't reference /health — ensure it matches your health endpoint"
fi
else
warn "HEALTHCHECK" "No HEALTHCHECK in Dockerfile — relying on docker-compose healthcheck only"
fi
# ── 5. Dependency layer caching ───────────────────────────────────────────────
# Check that manifest files are copied before source (for layer caching)
COPY_LINES=$(grep -nE '^COPY\s' "$DOCKERFILE" || true)
FIRST_MANIFEST_COPY=""
FIRST_SOURCE_COPY=""
while IFS= read -r line; do
linenum=$(echo "$line" | cut -d: -f1)
content=$(echo "$line" | cut -d: -f2-)
if echo "$content" | grep -qE '(pyproject\.toml|package.*\.json|Cargo\.(toml|lock)|go\.(mod|sum)|uv\.lock)'; then
[[ -z "$FIRST_MANIFEST_COPY" ]] && FIRST_MANIFEST_COPY="$linenum"
elif echo "$content" | grep -qE '\.\s+\.|src/|lib/'; then
[[ -z "$FIRST_SOURCE_COPY" ]] && FIRST_SOURCE_COPY="$linenum"
fi
done <<< "$COPY_LINES"
if [[ -n "$FIRST_MANIFEST_COPY" && -n "$FIRST_SOURCE_COPY" ]]; then
if [[ "$FIRST_MANIFEST_COPY" -lt "$FIRST_SOURCE_COPY" ]]; then
pass "Dependency manifest copied before source (layer caching)"
else
warn "Layer caching" "Source copied before dependency manifest — swap order for better Docker layer caching"
fi
fi
# ── Summary ───────────────────────────────────────────────────────────────────
echo
echo "Results: $PASS passed, $FAIL failed, $WARN warnings"
[[ "$FAIL" -eq 0 ]] && echo "DOCKER SECURITY CHECK PASSED" && exit 0
echo "DOCKER SECURITY CHECK FAILED" && exit 1

View file

@ -1,138 +0,0 @@
#!/usr/bin/env bash
# check-no-baked-env.sh — Verify env vars aren't baked into Docker artifacts
# Run standalone: bash scripts/check-no-baked-env.sh [project-dir]
# Run in pre-commit: add as a hook (see .pre-commit-config.yaml example in plugin-setup-guide)
#
# Checks:
# 1. docker-compose.yaml has no `environment:` block (all config via env_file only)
# 2. Dockerfile has no ENV with real/sensitive values
# 3. No hardcoded URLs, tokens, or credentials in Dockerfile or docker-compose.yaml
set -euo pipefail
PROJECT_DIR="${1:-.}"
PASS=0
FAIL=0
WARN=0
pass() { echo " ✓ PASS: $1"; PASS=$((PASS + 1)); }
fail() { echo " ✗ FAIL: $1$2"; FAIL=$((FAIL + 1)); }
warn() { echo " ⚠ WARN: $1$2"; WARN=$((WARN + 1)); }
echo "=== No Baked Env Vars Check: $PROJECT_DIR ==="
# ── 1. docker-compose.yaml — no environment: block ───────────────────────────
COMPOSE_FILE="$PROJECT_DIR/docker-compose.yaml"
if [[ -f "$COMPOSE_FILE" ]]; then
# Check for environment: key under services
if grep -qE '^\s+environment:' "$COMPOSE_FILE"; then
fail "No environment: block in docker-compose.yaml" \
"Found 'environment:' block — all env vars must come from env_file: .env only"
echo " Offending lines:"
grep -nE '^\s+environment:|^\s+-\s+\w+=' "$COMPOSE_FILE" | head -10 | while IFS= read -r line; do
echo " $line"
done
echo
echo " Fix: Remove the environment: block entirely."
echo " Add all variables to .env and .env.example instead."
echo " docker-compose.yaml should only use 'env_file: .env'"
else
pass "No environment: block in docker-compose.yaml"
fi
# Verify env_file is present
if grep -qE '^\s+env_file:' "$COMPOSE_FILE"; then
pass "env_file: directive present"
else
fail "env_file: directive" "No env_file: found — services won't receive credentials"
fi
# Check for hardcoded values in compose environment blocks (not variable references)
# Filter: lines that set KEY=VALUE where VALUE doesn't start with $ (variable ref)
HARDCODED=$(grep -nE '^\s+-\s+\w+=[^$]' "$COMPOSE_FILE" | grep -vE '=(true|false)$' || true)
if [[ -n "$HARDCODED" ]]; then
# Filter out known safe patterns
SUSPICIOUS=$(echo "$HARDCODED" | grep -vE '(build:|image:|container_name:|restart:|test:|interval:|timeout:|retries:|start_period:|memory:|cpus:|name:)' || true)
if [[ -n "$SUSPICIOUS" ]]; then
warn "Hardcoded values in compose" "Found potentially hardcoded values:"
echo "$SUSPICIOUS" | head -5 | while IFS= read -r line; do
echo " $line"
done
fi
fi
else
warn "docker-compose.yaml" "File not found at $COMPOSE_FILE — skipping compose checks"
fi
# ── 2. Dockerfile — no sensitive ENV values ───────────────────────────────────
DOCKERFILE="$PROJECT_DIR/Dockerfile"
if [[ -f "$DOCKERFILE" ]]; then
# Sensitive patterns that should never be in ENV
SENSITIVE_RE='(API_KEY|TOKEN|SECRET|PASSWORD|CREDENTIAL|PRIVATE_KEY|AUTH_TOKEN|BEARER)'
# Check ENV directives for sensitive variable names with values
SENSITIVE_ENVS=$(grep -nE "^ENV\s+\S*${SENSITIVE_RE}\S*\s*=" "$DOCKERFILE" || true)
if [[ -n "$SENSITIVE_ENVS" ]]; then
fail "No sensitive ENV in Dockerfile" "Found ENV directives with sensitive variable names:"
echo "$SENSITIVE_ENVS" | while IFS= read -r line; do
echo " $line"
done
else
pass "No sensitive ENV in Dockerfile"
fi
# Check for ENV with hardcoded URLs (might contain credentials)
URL_ENVS=$(grep -nE '^ENV\s+\S+\s*=\s*https?://' "$DOCKERFILE" || true)
if [[ -n "$URL_ENVS" ]]; then
warn "Hardcoded URLs in ENV" "Found ENV with hardcoded URLs (may contain credentials):"
echo "$URL_ENVS" | while IFS= read -r line; do
echo " $line"
done
else
pass "No hardcoded URLs in ENV"
fi
# Check for COPY .env into image
if grep -qE '^COPY\s+.*\.env\s' "$DOCKERFILE"; then
fail "No .env in image" "Dockerfile copies .env into the image — credentials will be baked in"
else
pass "No .env copied into image"
fi
# Check .dockerignore excludes .env
DOCKERIGNORE="$PROJECT_DIR/.dockerignore"
if [[ -f "$DOCKERIGNORE" ]]; then
if grep -qE '^\s*\.env\s*$' "$DOCKERIGNORE"; then
pass ".dockerignore excludes .env"
else
fail ".dockerignore" ".env not excluded — secrets may leak into build context"
fi
else
warn ".dockerignore" "File not found — create one that excludes .env"
fi
else
warn "Dockerfile" "File not found at $DOCKERFILE — skipping Dockerfile checks"
fi
# ── 3. entrypoint.sh — no hardcoded credentials ──────────────────────────────
ENTRYPOINT="$PROJECT_DIR/entrypoint.sh"
if [[ -f "$ENTRYPOINT" ]]; then
CRED_PATTERNS='(password|secret|token|api.key)\s*=\s*["\x27][^$]'
HARDCODED_CREDS=$(grep -inE "$CRED_PATTERNS" "$ENTRYPOINT" || true)
if [[ -n "$HARDCODED_CREDS" ]]; then
fail "No hardcoded creds in entrypoint.sh" "Found suspicious hardcoded values:"
echo "$HARDCODED_CREDS" | while IFS= read -r line; do
echo " $line"
done
else
pass "No hardcoded credentials in entrypoint.sh"
fi
else
# entrypoint.sh is optional
true
fi
# ── Summary ───────────────────────────────────────────────────────────────────
echo
echo "Results: $PASS passed, $FAIL failed, $WARN warnings"
[[ "$FAIL" -eq 0 ]] && echo "NO BAKED ENV CHECK PASSED" && exit 0
echo "NO BAKED ENV CHECK FAILED" && exit 1

View file

@ -1,181 +0,0 @@
#!/usr/bin/env bash
# check-outdated-deps.sh — Report outdated dependencies for Python/TypeScript/Rust projects
# Run standalone: bash scripts/check-outdated-deps.sh [project-dir]
#
# Auto-detects language from manifest files and reports outdated packages.
# Exit code: 0 = all current, 1 = outdated found, 2 = tool error
#
# Not recommended for pre-commit (requires network, slow). Run periodically or in CI.
set -euo pipefail
PROJECT_DIR="${1:-.}"
FOUND_OUTDATED=0
CHECKED=0
echo "=== Outdated Dependencies Check: $PROJECT_DIR ==="
echo
# ── Python (uv) ──────────────────────────────────────────────────────────────
if [[ -f "$PROJECT_DIR/pyproject.toml" ]]; then
CHECKED=$((CHECKED + 1))
echo "── Python (uv) ──"
if command -v uv &>/dev/null; then
# Check if lock file is current
if [[ -f "$PROJECT_DIR/uv.lock" ]]; then
if (cd "$PROJECT_DIR" && uv lock --check 2>/dev/null); then
echo " ✓ uv.lock is up to date"
else
echo " ⚠ uv.lock is out of sync with pyproject.toml — run 'uv lock'"
FOUND_OUTDATED=1
fi
fi
# Show outdated packages
echo " Checking for outdated packages..."
OUTDATED=$(cd "$PROJECT_DIR" && uv pip list --outdated 2>/dev/null || true)
if [[ -n "$OUTDATED" && "$OUTDATED" != *"No outdated packages"* ]]; then
LINE_COUNT=$(echo "$OUTDATED" | wc -l)
if [[ "$LINE_COUNT" -gt 2 ]]; then # Header lines
echo "$OUTDATED" | head -20
FOUND_OUTDATED=1
else
echo " ✓ All Python packages are current"
fi
else
echo " ✓ All Python packages are current"
fi
# Check pyproject.toml for pinned versions that may be outdated
echo " Checking pyproject.toml dependency pins..."
PINNED=$(grep -E '^\s*"[^"]+==\d' "$PROJECT_DIR/pyproject.toml" 2>/dev/null || true)
if [[ -n "$PINNED" ]]; then
echo " ⚠ Found exact-pinned dependencies (consider using >= or ~=):"
echo "$PINNED" | head -10 | while IFS= read -r line; do
echo " $line"
done
fi
else
echo " ⚠ uv not found — install with: curl -LsSf https://astral.sh/uv/install.sh | sh"
fi
echo
fi
# ── TypeScript / JavaScript (npm) ────────────────────────────────────────────
if [[ -f "$PROJECT_DIR/package.json" ]]; then
CHECKED=$((CHECKED + 1))
echo "── TypeScript / JavaScript ──"
if command -v npm &>/dev/null; then
echo " Checking for outdated packages..."
OUTDATED=$(cd "$PROJECT_DIR" && npm outdated --json 2>/dev/null || true)
if [[ -n "$OUTDATED" && "$OUTDATED" != "{}" ]]; then
# Parse JSON output for readable display
echo "$OUTDATED" | python3 -c "
import json, sys
try:
data = json.load(sys.stdin)
if data:
print(f' Found {len(data)} outdated package(s):')
print(f' {\"Package\":<30} {\"Current\":<15} {\"Wanted\":<15} {\"Latest\":<15}')
print(f' {\"─\"*30} {\"─\"*15} {\"─\"*15} {\"─\"*15}')
for pkg, info in sorted(data.items()):
current = info.get('current', '?')
wanted = info.get('wanted', '?')
latest = info.get('latest', '?')
marker = ' ← MAJOR' if current.split('.')[0] != latest.split('.')[0] else ''
print(f' {pkg:<30} {current:<15} {wanted:<15} {latest:<15}{marker}')
except (json.JSONDecodeError, KeyError):
print(' ⚠ Could not parse npm outdated output')
" 2>/dev/null || echo " ⚠ Could not parse npm outdated output"
FOUND_OUTDATED=1
else
echo " ✓ All npm packages are current"
fi
# Check for npm audit vulnerabilities
echo " Checking for known vulnerabilities..."
AUDIT=$(cd "$PROJECT_DIR" && npm audit --json 2>/dev/null || true)
VULN_COUNT=$(echo "$AUDIT" | python3 -c "
import json, sys
try:
data = json.load(sys.stdin)
total = data.get('metadata', {}).get('vulnerabilities', {})
count = sum(v for k, v in total.items() if k != 'info')
print(count)
except:
print(0)
" 2>/dev/null || echo "0")
if [[ "$VULN_COUNT" -gt 0 ]]; then
echo " ⚠ Found $VULN_COUNT known vulnerabilities — run 'npm audit' for details"
else
echo " ✓ No known vulnerabilities"
fi
else
echo " ⚠ npm not found"
fi
echo
fi
# ── Rust (cargo) ──────────────────────────────────────────────────────────────
if [[ -f "$PROJECT_DIR/Cargo.toml" ]]; then
CHECKED=$((CHECKED + 1))
echo "── Rust (cargo) ──"
if command -v cargo &>/dev/null; then
# Check if cargo-outdated is installed
if cargo outdated --version &>/dev/null 2>&1; then
echo " Checking for outdated crates..."
OUTDATED=$(cd "$PROJECT_DIR" && cargo outdated --root-deps-only 2>/dev/null || true)
if echo "$OUTDATED" | grep -qE '^\w'; then
echo "$OUTDATED" | head -20
FOUND_OUTDATED=1
else
echo " ✓ All Rust crates are current"
fi
else
echo " ⚠ cargo-outdated not installed — install with: cargo install cargo-outdated"
echo " Falling back to Cargo.lock age check..."
if [[ -f "$PROJECT_DIR/Cargo.lock" ]]; then
LOCK_AGE_DAYS=$(( ($(date +%s) - $(stat -c %Y "$PROJECT_DIR/Cargo.lock")) / 86400 ))
if [[ "$LOCK_AGE_DAYS" -gt 30 ]]; then
echo " ⚠ Cargo.lock is $LOCK_AGE_DAYS days old — consider running 'cargo update'"
else
echo " ✓ Cargo.lock updated within last 30 days ($LOCK_AGE_DAYS days ago)"
fi
fi
fi
# Check for cargo audit
if cargo audit --version &>/dev/null 2>&1; then
echo " Checking for known vulnerabilities..."
if (cd "$PROJECT_DIR" && cargo audit --quiet 2>/dev/null); then
echo " ✓ No known vulnerabilities"
else
echo " ⚠ Vulnerabilities found — run 'cargo audit' for details"
FOUND_OUTDATED=1
fi
else
echo " ⚠ cargo-audit not installed — install with: cargo install cargo-audit"
fi
else
echo " ⚠ cargo not found"
fi
echo
fi
# ── Summary ───────────────────────────────────────────────────────────────────
if [[ "$CHECKED" -eq 0 ]]; then
echo "No recognized project manifests found (pyproject.toml, package.json, Cargo.toml)"
exit 2
fi
echo "=== Summary ==="
if [[ "$FOUND_OUTDATED" -eq 0 ]]; then
echo "All dependencies are current across $CHECKED project(s)."
exit 0
else
echo "Outdated dependencies found. Review above and update as needed."
exit 1
fi

87
bin/check-version-sync.sh Executable file
View file

@ -0,0 +1,87 @@
#!/usr/bin/env bash
# check-version-sync.sh — Pre-commit hook to verify all version-bearing files match.
# Exits non-zero if versions are out of sync or CHANGELOG.md is missing an entry.
set -euo pipefail
PROJECT_DIR="${1:-.}"
cd "$PROJECT_DIR"
versions=()
files_checked=()
# Extract version from each file type
if [ -f "Cargo.toml" ]; then
v=$(grep -m1 '^version' Cargo.toml | sed 's/.*"\(.*\)".*/\1/')
[ -n "$v" ] && versions+=("Cargo.toml=$v") && files_checked+=("Cargo.toml")
fi
if [ -f "package.json" ]; then
v=$(python3 -c "import json; print(json.load(open('package.json')).get('version',''))" 2>/dev/null)
[ -n "$v" ] && versions+=("package.json=$v") && files_checked+=("package.json")
fi
if [ -f "pyproject.toml" ]; then
v=$(grep -m1 '^version' pyproject.toml | sed 's/.*"\(.*\)".*/\1/')
[ -n "$v" ] && versions+=("pyproject.toml=$v") && files_checked+=("pyproject.toml")
fi
if [ -f ".claude-plugin/plugin.json" ]; then
v=$(python3 -c "import json; print(json.load(open('.claude-plugin/plugin.json')).get('version',''))" 2>/dev/null)
[ -n "$v" ] && versions+=(".claude-plugin/plugin.json=$v") && files_checked+=(".claude-plugin/plugin.json")
fi
if [ -f ".codex-plugin/plugin.json" ]; then
v=$(python3 -c "import json; print(json.load(open('.codex-plugin/plugin.json')).get('version',''))" 2>/dev/null)
[ -n "$v" ] && versions+=(".codex-plugin/plugin.json=$v") && files_checked+=(".codex-plugin/plugin.json")
fi
if [ -f "gemini-extension.json" ]; then
v=$(python3 -c "import json; print(json.load(open('gemini-extension.json')).get('version',''))" 2>/dev/null)
[ -n "$v" ] && versions+=("gemini-extension.json=$v") && files_checked+=("gemini-extension.json")
fi
# Need at least one version source
if [ ${#versions[@]} -eq 0 ]; then
echo "[version-sync] No version-bearing files found — skipping"
exit 0
fi
# Check all versions match
canonical=""
mismatch=0
for entry in "${versions[@]}"; do
file="${entry%%=*}"
ver="${entry##*=}"
if [ -z "$canonical" ]; then
canonical="$ver"
elif [ "$ver" != "$canonical" ]; then
mismatch=1
fi
done
if [ "$mismatch" -eq 1 ]; then
echo "[version-sync] FAIL — versions are out of sync:"
for entry in "${versions[@]}"; do
file="${entry%%=*}"
ver="${entry##*=}"
marker=" "
[ "$ver" != "$canonical" ] && marker="!"
echo " $marker $file: $ver"
done
echo ""
echo "All version-bearing files must have the same version."
echo "Files checked: ${files_checked[*]}"
exit 1
fi
# Check CHANGELOG.md has an entry for the current version
if [ -f "CHANGELOG.md" ]; then
if ! grep -qF "$canonical" CHANGELOG.md; then
echo "[version-sync] WARN — CHANGELOG.md has no entry for version $canonical"
echo " Add a changelog entry before pushing."
# Warning only, not blocking
fi
fi
echo "[version-sync] OK — all ${#versions[@]} files at v${canonical}"
exit 0

View file

@ -1,267 +0,0 @@
#!/usr/bin/env bash
# ensure-ignore-files.sh — Ensure .gitignore and .dockerignore have all required patterns
#
# Modes:
# (default) Append missing patterns to the files (SessionStart hook)
# --check Report missing patterns and exit non-zero if any are missing (pre-commit/CI)
#
# Usage:
# bash scripts/ensure-ignore-files.sh [--check] [project-dir]
#
# As a plugin hook:
# "command": "${CLAUDE_PLUGIN_ROOT}/hooks/scripts/ensure-ignore-files.sh"
set -euo pipefail
CHECK_MODE=false
if [[ "${1:-}" == "--check" ]]; then
CHECK_MODE=true
shift
fi
PROJECT_DIR="${1:-${CLAUDE_PLUGIN_ROOT:-.}}"
PASS=0
FAIL=0
WARN=0
pass() { PASS=$((PASS + 1)); if $CHECK_MODE; then echo "$1"; fi; }
fail() { FAIL=$((FAIL + 1)); echo " ✗ FAIL: $1$2"; }
warn() { WARN=$((WARN + 1)); if $CHECK_MODE; then echo " ⚠ WARN: $1$2"; fi; }
ensure_pattern() {
local file="$1"
local pattern="$2"
local label="$3"
if grep -qxF "$pattern" "$file" 2>/dev/null; then
pass "$label: '$pattern'"
elif $CHECK_MODE; then
fail "$label: '$pattern'" "missing"
else
echo "$pattern" >> "$file"
pass "$label: '$pattern' (added)"
fi
}
# ═══════════════════════════════════════════════════════════════════════════════
# .gitignore — full required pattern list from plugin-setup-guide
# ═══════════════════════════════════════════════════════════════════════════════
GITIGNORE="$PROJECT_DIR/.gitignore"
if $CHECK_MODE; then echo "=== Ignore Files Check: $PROJECT_DIR ==="; echo "── .gitignore ──"; fi
if [[ ! -f "$GITIGNORE" ]] && $CHECK_MODE; then
fail ".gitignore" "File not found — every plugin repo must have a .gitignore"
else
touch "$GITIGNORE"
# ── Secrets ──
REQUIRED_GIT=(
".env"
".env.*"
"!.env.example"
)
# ── Runtime / hook artifacts ──
REQUIRED_GIT+=(
"*.log"
)
# ── Claude Code / AI tooling ──
REQUIRED_GIT+=(
".claude/settings.local.json"
".claude/worktrees/"
".omc/"
".lavra/"
".beads/"
".serena/"
".worktrees"
".full-review/"
".full-review-archive-*"
)
# ── IDE / editor ──
REQUIRED_GIT+=(
".vscode/"
".cursor/"
".windsurf/"
".1code/"
)
# ── Caches ──
REQUIRED_GIT+=(
".cache/"
)
# ── Documentation artifacts ──
REQUIRED_GIT+=(
"docs/plans/"
"docs/sessions/"
"docs/reports/"
"docs/research/"
"docs/superpowers/"
)
for pattern in "${REQUIRED_GIT[@]}"; do
ensure_pattern "$GITIGNORE" "$pattern" ".gitignore"
done
# ── Language-specific (check only, don't auto-add — user must uncomment) ──
if $CHECK_MODE; then
if [[ -f "$PROJECT_DIR/pyproject.toml" ]]; then
echo " Detected: Python project"
for p in ".venv/" "__pycache__/" "*.py[oc]" "*.egg-info/" "dist/" "build/"; do
if grep -qxF "$p" "$GITIGNORE" 2>/dev/null; then
pass ".gitignore (Python): '$p'"
else
warn ".gitignore (Python)" "'$p' not found — uncomment Python section"
fi
done
fi
if [[ -f "$PROJECT_DIR/package.json" ]]; then
echo " Detected: TypeScript/JavaScript project"
for p in "node_modules/" "dist/" "build/"; do
if grep -qxF "$p" "$GITIGNORE" 2>/dev/null; then
pass ".gitignore (TypeScript): '$p'"
else
warn ".gitignore (TypeScript)" "'$p' not found — uncomment TS section"
fi
done
fi
if [[ -f "$PROJECT_DIR/Cargo.toml" ]]; then
echo " Detected: Rust project"
for p in "target/"; do
if grep -qxF "$p" "$GITIGNORE" 2>/dev/null; then
pass ".gitignore (Rust): '$p'"
else
warn ".gitignore (Rust)" "'$p' not found — uncomment Rust section"
fi
done
fi
# Verify .env.example is NOT ignored
if git -C "$PROJECT_DIR" check-ignore .env.example > /dev/null 2>&1; then
fail ".gitignore" ".env.example is being ignored — '!.env.example' must come after '.env.*'"
else
pass ".gitignore: .env.example is tracked (not ignored)"
fi
fi
fi
# ═══════════════════════════════════════════════════════════════════════════════
# .dockerignore — full required pattern list from plugin-setup-guide
# ═══════════════════════════════════════════════════════════════════════════════
DOCKERIGNORE="$PROJECT_DIR/.dockerignore"
# Skip if no Dockerfile
if [[ ! -f "$PROJECT_DIR/Dockerfile" ]]; then
if $CHECK_MODE; then echo; echo "── .dockerignore ──"; echo " No Dockerfile found — skipping"; fi
else
if $CHECK_MODE; then echo; echo "── .dockerignore ──"; fi
if [[ ! -f "$DOCKERIGNORE" ]] && $CHECK_MODE; then
fail ".dockerignore" "File not found — required when Dockerfile exists"
else
touch "$DOCKERIGNORE"
# ── Version control ──
REQUIRED_DOCKER=(
".git"
".github"
)
# ── Secrets ──
REQUIRED_DOCKER+=(
".env"
".env.*"
"!.env.example"
)
# ── Claude Code / AI tooling ──
REQUIRED_DOCKER+=(
".claude"
".claude-plugin"
".codex-plugin"
".omc"
".lavra"
".beads"
".serena"
".worktrees"
".full-review"
".full-review-archive-*"
)
# ── IDE / editor ──
REQUIRED_DOCKER+=(
".vscode"
".cursor"
".windsurf"
".1code"
)
# ── Docs, tests, scripts — not needed at runtime ──
REQUIRED_DOCKER+=(
"docs"
"tests"
"scripts"
"*.md"
"!README.md"
)
# ── Runtime artifacts ──
REQUIRED_DOCKER+=(
"logs"
"backups"
"*.log"
".cache"
)
for pattern in "${REQUIRED_DOCKER[@]}"; do
ensure_pattern "$DOCKERIGNORE" "$pattern" ".dockerignore"
done
# ── Language-specific (check only) ──
if $CHECK_MODE; then
if [[ -f "$PROJECT_DIR/pyproject.toml" ]]; then
for p in ".venv" "__pycache__/" "*.py[oc]" "*.egg-info" "dist/"; do
if grep -qxF "$p" "$DOCKERIGNORE" 2>/dev/null; then
pass ".dockerignore (Python): '$p'"
else
warn ".dockerignore (Python)" "'$p' not found — uncomment Python section"
fi
done
fi
if [[ -f "$PROJECT_DIR/package.json" ]]; then
for p in "node_modules/" "dist/" "coverage/"; do
if grep -qxF "$p" "$DOCKERIGNORE" 2>/dev/null; then
pass ".dockerignore (TypeScript): '$p'"
else
warn ".dockerignore (TypeScript)" "'$p' not found — uncomment TS section"
fi
done
fi
if [[ -f "$PROJECT_DIR/Cargo.toml" ]]; then
for p in "target/"; do
if grep -qxF "$p" "$DOCKERIGNORE" 2>/dev/null; then
pass ".dockerignore (Rust): '$p'"
else
warn ".dockerignore (Rust)" "'$p' not found — uncomment Rust section"
fi
done
fi
fi
fi
fi
# ═══════════════════════════════════════════════════════════════════════════════
# Summary
# ═══════════════════════════════════════════════════════════════════════════════
if $CHECK_MODE; then
echo
echo "Results: $PASS passed, $FAIL failed, $WARN warnings"
[[ "$FAIL" -eq 0 ]] && echo "IGNORE FILES CHECK PASSED" && exit 0
echo "IGNORE FILES CHECK FAILED" && exit 1
fi

14
bin/sync-uv.sh Executable file
View file

@ -0,0 +1,14 @@
#!/usr/bin/env bash
set -euo pipefail
REPO_ROOT="${CLAUDE_PLUGIN_ROOT:-$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)}"
DATA_ROOT="${CLAUDE_PLUGIN_DATA:-${REPO_ROOT}}"
VENV_DIR="${DATA_ROOT}/.venv"
if [[ ! -f "${REPO_ROOT}/uv.lock" ]]; then
echo "sync-uv.sh: missing lockfile at ${REPO_ROOT}/uv.lock" >&2
exit 1
fi
mkdir -p "${DATA_ROOT}"
UV_PROJECT_ENVIRONMENT="${VENV_DIR}" uv sync --project "${REPO_ROOT}"

View file

@ -26,9 +26,6 @@ Pre-release and quality checklist. Complete all items before tagging a release.
- [ ] No credentials in code, docs, or commit history
- [ ] `~/.unraid-mcp/.env` has `chmod 600` permissions
- [ ] `~/.unraid-mcp/` directory has `chmod 700` permissions
- [ ] `bin/check-docker-security.sh` passes
- [ ] `bin/check-no-baked-env.sh` passes
- [ ] `bin/ensure-ignore-files.sh --check` passes
- [ ] Bearer token uses constant-time comparison (`hmac.compare_digest`)
- [ ] No sensitive values logged (even at DEBUG level)
- [ ] `UNRAID_MCP_BEARER_TOKEN` removed from `os.environ` after startup
@ -52,16 +49,14 @@ Pre-release and quality checklist. Complete all items before tagging a release.
## CI/CD
- [ ] `ci.yml` lint, typecheck, test, version-sync, audit, docker-security jobs pass
- [ ] `ci.yml` lint, typecheck, test, version-sync, audit, gitleaks jobs pass
- [ ] `docker-publish.yml` builds multi-arch (amd64, arm64) images
- [ ] `publish-pypi.yml` tag-version check, PyPI publish, GitHub release, MCP registry publish all configured
- [ ] Trivy vulnerability scan runs on published images
## Hooks
- [ ] `hooks/hooks.json` registers PostToolUse hooks
- [ ] `fix-env-perms.sh` enforces 600 on credential files
- [ ] `ensure-ignore-files.sh` keeps `.gitignore` and `.dockerignore` aligned
- [ ] `hooks/hooks.json` registers SessionStart hook (`bin/sync-uv.sh`)
## Documentation

View file

@ -111,7 +111,4 @@ Subscription data with log content is capped at 1 MB / 5,000 lines to prevent un
## Hooks enforcement
PostToolUse hooks run after every Write, Edit, MultiEdit, or Bash operation:
- `fix-env-perms.sh`: Ensures `~/.unraid-mcp/.env` stays at mode 600
- `ensure-ignore-files.sh`: Keeps `.gitignore` and `.dockerignore` aligned with security requirements
PostToolUse hooks run after every Write, Edit, MultiEdit, or Bash operation. See `docs/plugin/HOOKS.md` for current hook configuration.

View file

@ -78,29 +78,28 @@ Complete listing of all plugin components.
| --- | --- | --- |
| `skills/unraid/SKILL.md` | unraid | Client-facing skill with all domains, subactions, and workflows |
## bin/ scripts
| Path | Language | Description |
| --- | --- | --- |
| `bin/sync-uv.sh` | Bash | SessionStart hook: sync uv virtual environment |
| `bin/block-env-commits.sh` | Bash | Pre-commit hook: block accidental .env file commits |
| `bin/bump-version.sh` | Bash | Bump version across all version-bearing files |
| `bin/check-version-sync.sh` | Bash | Verify version consistency across pyproject.toml and manifests |
| `bin/validate-marketplace.sh` | Bash | Validate Claude Code marketplace and plugin manifest structure |
| `bin/generate_unraid_api_reference.py` | Python | Generate canonical GraphQL API docs from live Unraid introspection |
## Hooks
| Hook | Trigger | Script |
| --- | --- | --- |
| Fix env permissions | PostToolUse (Write/Edit/Bash) | `hooks/scripts/fix-env-perms.sh` |
| Ensure ignore files | PostToolUse (Write/Edit/Bash) | `hooks/scripts/ensure-ignore-files.sh` |
## Scripts
| Script | Purpose |
| --- | --- |
| `bin/check-docker-security.sh` | Dockerfile security audit |
| `bin/check-no-baked-env.sh` | Verify no env vars baked into images |
| `bin/check-outdated-deps.sh` | Dependency freshness check |
| `bin/ensure-ignore-files.sh` | Gitignore/dockerignore alignment |
| `bin/generate_unraid_api_reference.py` | Generate canonical API docs and schema change report from GraphQL introspection |
| `bin/validate-marketplace.sh` | Marketplace JSON validation |
| Sync uv environment | SessionStart | `bin/sync-uv.sh` |
## CI/CD workflows
| Workflow | Trigger | Jobs |
| --- | --- | --- |
| `ci.yml` | Push/PR to main | lint, typecheck, test, version-sync, mcp-integration, audit, docker-security |
| `ci.yml` | Push/PR to main | lint, typecheck, test, version-sync, mcp-integration, audit, gitleaks |
| `docker-publish.yml` | Push to main/tags | Build multi-arch Docker image, push to ghcr.io, Trivy scan |
| `publish-pypi.yml` | Tag `v*.*.*` | Build, PyPI publish, GitHub release, MCP registry publish |

View file

@ -24,9 +24,6 @@ The individual plugin configuration for the Unraid MCP server.
- Repository and homepage links
- `mcpServers` block that configures the server to run via `uv run unraid-mcp-server` in stdio mode
### 3. Validation Script
- `bin/validate-marketplace.sh` — Automated validation of marketplace structure
## MCP Tools Exposed
The plugin registers **3 MCP tools**:
@ -109,6 +106,22 @@ Install from a specific branch or commit:
/plugin marketplace add jmagar/unraid-mcp#abc123
```
## Validation Script
To verify the marketplace and plugin structure is valid before publishing:
```bash
bash bin/validate-marketplace.sh [repo-root]
```
The script checks:
- Marketplace and plugin JSON manifests exist and are valid
- Required plugin files are in place (`SKILL.md`, `README.md`, `scripts/`, `examples/`, `references/`)
- Plugin is listed in the marketplace manifest
- Version numbers are in sync between `pyproject.toml` and `.claude-plugin/plugin.json`
Exits 0 on success, 1 if any check fails.
## Plugin Structure
```text
@ -124,7 +137,8 @@ unraid-mcp/
│ ├── core/ # GraphQL client, exceptions, shared types
│ └── subscriptions/ # Real-time WebSocket subscription manager
└── bin/
└── validate-marketplace.sh # Validation tool
├── sync-uv.sh # Sync uv environment at SessionStart
└── validate-marketplace.sh # Validate marketplace/plugin structure
```
## Marketplace Metadata
@ -144,12 +158,7 @@ unraid-mcp/
Before publishing to GitHub:
1. **Validate Structure**
```bash
./bin/validate-marketplace.sh
```
2. **Update Version Numbers** (must be in sync)
1. **Update Version Numbers** (must be in sync)
- `pyproject.toml``version = "X.Y.Z"` under `[project]`
- `.claude-plugin/plugin.json``"version": "X.Y.Z"`
- `.claude-plugin/marketplace.json``"version"` in both `metadata` and `plugins[]`
@ -204,8 +213,7 @@ To release a new version:
1. Make changes to the plugin code
2. Update version in `pyproject.toml`, `.claude-plugin/plugin.json`, and `.claude-plugin/marketplace.json`
3. Run validation: `./bin/validate-marketplace.sh`
4. Commit and push
3. Commit and push
Users with the plugin installed will see the update available and can upgrade:
```bash
@ -218,18 +226,5 @@ Users with the plugin installed will see the update available and can upgrade:
- **Issues:** https://github.com/jmagar/unraid-mcp/issues
- **Destructive Actions:** `docs/DESTRUCTIVE_ACTIONS.md`
## Validation
Run the validation script anytime to ensure marketplace integrity:
```bash
./bin/validate-marketplace.sh
```
This checks:
- Manifest file existence and validity
- JSON syntax
- Required fields
- Plugin structure
- Source path accuracy
- Documentation completeness

View file

@ -16,7 +16,6 @@ GitHub Actions configuration for unraid-mcp.
| `version-sync` | Verify all version files match | Shell script comparing pyproject.toml, plugin.json (x3), gemini-extension.json |
| `mcp-integration` | Live MCP integration tests | `test_live.sh` with secrets (push/same-repo PRs only) |
| `audit` | Dependency security audit | `uv audit` |
| `docker-security` | Docker security checks | `check-docker-security.sh`, `check-no-baked-env.sh`, `ensure-ignore-files.sh` |
### `docker-publish.yml` -- Docker Image Build
@ -73,11 +72,10 @@ Failure blocks the CI pipeline.
| format | `just fmt` or `uv run ruff format .` |
| typecheck | `just typecheck` or `uv run ty check unraid_mcp/` |
| test | `just test` or `uv run pytest` |
| docker-security | `just check-contract` |
| build | `just build` or `docker build -t unraid-mcp .` |
## See Also
- [TESTS.md](TESTS.md) -- Test suite details
- [PUBLISH.md](PUBLISH.md) -- Versioning and release strategy
- [PRE-COMMIT.md](PRE-COMMIT.md) -- Local pre-commit checks
- [PRE-COMMIT.md](PRE-COMMIT.md) -- Local git hook checks

View file

@ -44,12 +44,6 @@ Configured for Python 3.12 with respect for `type: ignore` comments.
The `.claude-plugin/hooks/hooks.json` registers hooks that run after Write, Edit, MultiEdit, or Bash operations:
### fix-env-perms.sh
Ensures credential files maintain secure permissions:
- `~/.unraid-mcp/.env` stays at mode 600
- `~/.unraid-mcp/` directory stays at mode 700
### ensure-ignore-files.sh
Keeps `.gitignore` and `.dockerignore` aligned:

View file

@ -8,23 +8,17 @@ unraid-mcp registers PostToolUse hooks that run after Write, Edit, MultiEdit, or
**File**: `hooks/hooks.json`
The hooks configuration registers a single `SessionStart` hook:
```json
{
"description": "Enforce 600 permissions and keep gitignore aligned",
"hooks": {
"PostToolUse": [
"SessionStart": [
{
"matcher": "Write|Edit|MultiEdit|Bash",
"hooks": [
{
"type": "command",
"command": "${CLAUDE_PLUGIN_ROOT}/hooks/scripts/fix-env-perms.sh",
"timeout": 5
},
{
"type": "command",
"command": "${CLAUDE_PLUGIN_ROOT}/hooks/scripts/ensure-ignore-files.sh",
"timeout": 5
"command": "${CLAUDE_PLUGIN_ROOT}/bin/sync-uv.sh"
}
]
}
@ -35,40 +29,16 @@ unraid-mcp registers PostToolUse hooks that run after Write, Edit, MultiEdit, or
## Hook scripts
### fix-env-perms.sh
### bin/sync-uv.sh
**Purpose**: Ensures credential files maintain secure permissions after any file operation.
**Purpose**: Ensures the uv environment is up to date at the start of each session.
- Sets `~/.unraid-mcp/.env` to mode 600
- Sets `~/.unraid-mcp/` directory to mode 700
- Runs silently (no output on success)
- Timeout: 5 seconds
### ensure-ignore-files.sh
**Purpose**: Keeps `.gitignore` and `.dockerignore` aligned with security requirements.
- Verifies sensitive patterns are present in ignore files
- Prevents credential files from being committed or included in Docker images
- Can run in check mode (`--check`) for CI validation
- Timeout: 5 seconds
### Other hook scripts
| Script | Purpose |
|--------|---------|
| `ensure-gitignore.sh` | Gitignore-specific enforcement |
| `sync-env.sh` | Environment file synchronization |
- Runs `uv sync` to install/update dependencies
- Runs silently on success
## Trigger
Hooks fire after every:
- `Write` -- new file creation
- `Edit` -- file modification
- `MultiEdit` -- batch file modification
- `Bash` -- shell command execution
The 5-second timeout ensures hooks never block the development workflow.
The hook fires once at `SessionStart` to synchronize the Python environment.
## See Also

View file

@ -82,18 +82,15 @@ unraid-mcp/
+-- hooks/
| +-- hooks.json # PostToolUse hook definitions
| +-- scripts/
| +-- fix-env-perms.sh # Credential permission enforcement
| +-- ensure-ignore-files.sh # Gitignore/dockerignore alignment
| +-- ensure-gitignore.sh # Gitignore-specific checks
| +-- sync-env.sh # Environment file synchronization
|
+-- bin/
| +-- check-docker-security.sh # Dockerfile security audit
| +-- check-no-baked-env.sh # No baked environment variables
| +-- check-outdated-deps.sh # Dependency freshness
| +-- ensure-ignore-files.sh # Ignore file validation
| +-- generate_unraid_api_reference.py # GraphQL schema to docs
| +-- validate-marketplace.sh # Marketplace JSON validation
| +-- CLAUDE.md # AI assistant instructions for bin/ scripts
| +-- sync-uv.sh # SessionStart hook: sync uv virtual environment
| +-- block-env-commits.sh # Pre-commit hook: block .env file commits
| +-- bump-version.sh # Bump version across all version-bearing files
| +-- check-version-sync.sh # Verify version consistency across manifest files
| +-- validate-marketplace.sh # Validate marketplace/plugin manifest structure
| +-- generate_unraid_api_reference.py # Generate GraphQL API docs from live introspection
|
+-- tests/ # Test suite (see mcp/TESTS.md)
| +-- conftest.py

View file

@ -1,92 +1,59 @@
# Scripts Reference -- unraid-mcp
## Quality gate scripts (`bin/`)
### check-docker-security.sh
Audits the Dockerfile for security best practices:
- Non-root user verification
- No hardcoded secrets
- Proper permissions
- Minimal base image
```bash
bash bin/check-docker-security.sh Dockerfile
```
### check-no-baked-env.sh
Verifies no environment variables are baked into Docker images or committed to version control:
- Scans Dockerfile for `ENV` directives with secrets
- Checks for `.env` files in tracked directories
```bash
bash bin/check-no-baked-env.sh .
```
### check-outdated-deps.sh
Checks for outdated Python dependencies:
```bash
bash bin/check-outdated-deps.sh
```
### ensure-ignore-files.sh
Validates `.gitignore` and `.dockerignore` contain required patterns:
- Credential files
- Cache directories
- Build artifacts
- Log files
```bash
# Check mode (CI)
bash bin/ensure-ignore-files.sh --check .
# Fix mode (development)
bash bin/ensure-ignore-files.sh .
```
### validate-marketplace.sh
Validates marketplace JSON configuration:
```bash
bash bin/validate-marketplace.sh
```
## Utility scripts
### generate_unraid_api_reference.py
Generates the canonical Unraid API docs from GraphQL schema introspection:
```bash
python bin/generate_unraid_api_reference.py
```
Produces:
- `docs/unraid/UNRAID-API-SUMMARY.md`
- `docs/unraid/UNRAID-API-COMPLETE-REFERENCE.md`
- `docs/unraid/UNRAID-API-INTROSPECTION.json`
- `docs/unraid/UNRAID-SCHEMA.graphql`
- `docs/unraid/UNRAID-API-CHANGES.md`
## Hook scripts (`hooks/scripts/`)
| Script | Purpose |
|--------|---------|
| `fix-env-perms.sh` | Enforce 600 permissions on credential files |
| `ensure-ignore-files.sh` | Keep ignore files aligned |
| `ensure-gitignore.sh` | Gitignore-specific checks |
| `sync-env.sh` | Environment file synchronization |
No hook scripts remain — the hooks directory only contains `hooks.json` registering `bin/sync-uv.sh` at SessionStart.
## bin/ scripts
### `bin/validate-marketplace.sh`
Validates the Claude Code marketplace and plugin structure. Checks JSON manifests, required files, plugin listing, marketplace metadata, and version sync between `pyproject.toml` and `.claude-plugin/plugin.json`.
**Usage:**
```bash
bash bin/validate-marketplace.sh [repo-root]
```
Run from the repo root (no arguments needed in that case). Exits 0 if all checks pass, 1 on any failure. Prints a summary of passed/failed checks.
**What it validates:**
- `.claude-plugin/marketplace.json` exists and is valid JSON with required fields
- `.claude-plugin/plugin.json` exists and is valid JSON with required fields
- Required skill files (`skills/unraid/SKILL.md`, `README.md`, `scripts/`, `examples/`, `references/`)
- Plugin is listed in the marketplace manifest
- Version in `pyproject.toml` matches version in `.claude-plugin/plugin.json`
### `bin/generate_unraid_api_reference.py`
Generates canonical Unraid GraphQL documentation from live API introspection. Connects to the Unraid GraphQL endpoint, runs a full introspection query, and writes several output files under `docs/unraid/`:
- `UNRAID-API-COMPLETE-REFERENCE.md` — Full human-readable type/field reference
- `UNRAID-API-SUMMARY.md` — Condensed root operations summary with tables
- `UNRAID-API-INTROSPECTION.json` — Raw introspection JSON snapshot
- `UNRAID-SCHEMA.graphql` — SDL schema output
- `UNRAID-API-CHANGES.md` — Diff of type/field changes vs. the previous snapshot
**Usage:**
```bash
uv run python bin/generate_unraid_api_reference.py
```
Requires `UNRAID_API_URL` and `UNRAID_API_KEY` environment variables (or `--api-url` / `--api-key` flags). SSL verification is disabled by default for self-signed certificates; pass `--verify-ssl` to enable.
**Key flags:**
```
--api-url URL GraphQL endpoint (default: $UNRAID_API_URL)
--api-key KEY API key (default: $UNRAID_API_KEY)
--verify-ssl Enable SSL cert verification
--include-introspection-types Include __Schema/__Type etc. in output
--timeout-seconds N HTTP timeout (default: 90)
```
## CI usage
Scripts are called by CI workflows:
- `ci.yml` runs `bin/check-docker-security.sh`, `bin/check-no-baked-env.sh`, `bin/ensure-ignore-files.sh --check`
- `just check-contract` runs all three locally
- `ci.yml` runs lint, typecheck, test, version-sync, mcp-integration, audit, and gitleaks jobs
## See Also

View file

@ -74,6 +74,23 @@ The full Unraid GraphQL schema is available in:
- `docs/unraid/UNRAID-API-INTROSPECTION.json` -- Introspection result
- `docs/unraid/UNRAID-API-COMPLETE-REFERENCE.md` -- Human-readable reference
### Regenerating the schema docs
All schema docs are generated from live introspection by `bin/generate_unraid_api_reference.py`. Run it whenever the Unraid API changes:
```bash
uv run python bin/generate_unraid_api_reference.py
```
Requires `UNRAID_API_URL` and `UNRAID_API_KEY` in the environment (or pass `--api-url` / `--api-key`). The script writes all five output files under `docs/unraid/` and prints each path on success. It also computes a diff against the previous introspection snapshot and writes it to `UNRAID-API-CHANGES.md`.
Additional flags:
```
--verify-ssl Enable SSL cert verification (disabled by default for self-signed certs)
--include-introspection-types Include __Schema/__Type etc. in generated output
--timeout-seconds N HTTP timeout (default: 90)
```
### Query organization
Queries are organized into domain dicts in each `tools/_<domain>.py` module:
@ -115,16 +132,6 @@ Defined in `subscriptions/queries.py`:
- `notificationAdded` -- New notification events
- `logFile` -- Log file tail (requires `path` variable)
## Generating API docs
The `scripts/generate_unraid_api_reference.py` script generates documentation from GraphQL introspection:
```bash
python scripts/generate_unraid_api_reference.py
```
This queries the Unraid API's introspection endpoint and produces structured documentation of all types, queries, mutations, and subscriptions.
## See Also
- [../mcp/TOOLS.md](../mcp/TOOLS.md) -- How queries map to tool actions

View file

@ -1,6 +1,6 @@
{
"name": "unraid-mcp",
"version": "1.3.6",
"version": "1.3.8",
"description": "Query, monitor, and manage Unraid servers via GraphQL API through MCP tools. Supports system info, Docker, VMs, array/parity, notifications, plugins, and live telemetry.",
"mcpServers": {
"unraid-mcp": {

7
hooks/CLAUDE.md Normal file
View file

@ -0,0 +1,7 @@
# `hooks/`
Use this subtree only for hook entrypoints that are called from `hooks/hooks.json`.
- Put hook-specific wrappers here when Claude Code needs to run them automatically
- Use `bin/` for reusable executables and helpers you may run manually or from hooks
- If logic is useful outside the hook lifecycle, keep the implementation in `bin/` and make the hook call it

View file

@ -1,36 +1,11 @@
{
"description": "Sync userConfig credentials to .env, enforce 600 permissions, ensure gitignore",
"hooks": {
"SessionStart": [
{
"hooks": [
{
"type": "command",
"command": "${CLAUDE_PLUGIN_ROOT}/hooks/scripts/sync-env.sh",
"timeout": 10
},
{
"type": "command",
"command": "${CLAUDE_PLUGIN_ROOT}/hooks/scripts/ensure-ignore-files.sh",
"timeout": 5
}
]
}
],
"PostToolUse": [
{
"matcher": "Write|Edit|MultiEdit|Bash",
"hooks": [
{
"type": "command",
"command": "${CLAUDE_PLUGIN_ROOT}/hooks/scripts/fix-env-perms.sh",
"timeout": 5,
"if": "Write(*.env*)|Edit(*.env*)|MultiEdit(*.env*)|Bash(*.env*)"
},
{
"type": "command",
"command": "${CLAUDE_PLUGIN_ROOT}/hooks/scripts/ensure-ignore-files.sh",
"timeout": 5
"command": "${CLAUDE_PLUGIN_ROOT}/bin/sync-uv.sh"
}
]
}

View file

@ -1,61 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
GITIGNORE="${CLAUDE_PLUGIN_ROOT}/.gitignore"
REQUIRED=(
".env"
".env.*"
"!.env.example"
"backups/*"
"!backups/.gitkeep"
"logs/*"
"!logs/.gitkeep"
"__pycache__/"
)
touch "$GITIGNORE"
existing="$(cat "$GITIGNORE")"
for pattern in "${REQUIRED[@]}"; do
if ! grep -qxF "$pattern" "$GITIGNORE" 2>/dev/null; then
existing+=$'\n'"$pattern"
fi
done
printf '%s\n' "$existing" | awk '
BEGIN {
want[".env"]=1
want[".env.*"]=1
want["!.env.example"]=1
want["backups/*"]=1
want["!backups/.gitkeep"]=1
want["logs/*"]=1
want["!logs/.gitkeep"]=1
want["__pycache__/"]=1
}
{ lines[++n]=$0 }
END {
emitted[""] = 1
for (i = 1; i <= n; i++) {
if (!want[lines[i]] && !emitted[lines[i]]) {
print lines[i]
emitted[lines[i]] = 1
}
}
ordered[1]=".env"
ordered[2]=".env.*"
ordered[3]="!.env.example"
ordered[4]="backups/*"
ordered[5]="!backups/.gitkeep"
ordered[6]="logs/*"
ordered[7]="!logs/.gitkeep"
ordered[8]="__pycache__/"
for (i = 1; i <= 8; i++) {
if (!emitted[ordered[i]]) {
print ordered[i]
emitted[ordered[i]] = 1
}
}
}
' > "$GITIGNORE"

View file

@ -1,72 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
CHECK_MODE=false
if [[ "${1:-}" == "--check" ]]; then
CHECK_MODE=true
fi
GITIGNORE="${CLAUDE_PLUGIN_ROOT}/.gitignore"
REQUIRED=(
".env"
".env.*"
"!.env.example"
"backups/*"
"!backups/.gitkeep"
"logs/*"
"!logs/.gitkeep"
"__pycache__/"
)
if [ "$CHECK_MODE" = true ]; then
missing=()
for pattern in "${REQUIRED[@]}"; do
if ! grep -qxF "$pattern" "$GITIGNORE" 2>/dev/null; then
missing+=("$pattern")
fi
done
if [ "${#missing[@]}" -gt 0 ]; then
echo "ensure-ignore-files: missing patterns in .gitignore:" >&2
for p in "${missing[@]}"; do
echo " $p" >&2
done
exit 1
fi
exit 0
fi
touch "$GITIGNORE"
existing="$(cat "$GITIGNORE")"
for pattern in "${REQUIRED[@]}"; do
if ! grep -qxF "$pattern" "$GITIGNORE" 2>/dev/null; then
existing+=$'\n'"$pattern"
fi
done
GITIGNORE_TMP="${GITIGNORE}.tmp.$$"
# Join REQUIRED with RS separator (|) — none of the patterns contain |
_pat_list="$(IFS='|'; printf '%s' "${REQUIRED[*]}")"
printf '%s\n' "$existing" | awk -v pat_list="$_pat_list" '
BEGIN {
n_pat = split(pat_list, ordered, "|")
for (i = 1; i <= n_pat; i++) want[ordered[i]] = 1
}
{ lines[++n]=$0 }
END {
emitted[""] = 1
for (i = 1; i <= n; i++) {
if (!want[lines[i]] && !emitted[lines[i]]) {
print lines[i]
emitted[lines[i]] = 1
}
}
for (i = 1; i <= n_pat; i++) {
if (!emitted[ordered[i]]) {
print ordered[i]
emitted[ordered[i]] = 1
}
}
}
' > "$GITIGNORE_TMP" && mv "$GITIGNORE_TMP" "$GITIGNORE"

View file

@ -1,29 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
ENV_FILE="${CLAUDE_PLUGIN_ROOT}/.env"
[ -f "$ENV_FILE" ] || exit 0
input=$(cat)
tool_name=$(echo "$input" | jq -r '.tool_name // ""')
tool_input=$(echo "$input" | jq -r '.tool_input // {}')
touched_env=false
case "$tool_name" in
Write|Edit|MultiEdit)
file_path=$(echo "$tool_input" | jq -r '.file_path // ""')
[[ "$file_path" == *".env"* ]] && touched_env=true
;;
Bash)
command=$(echo "$tool_input" | jq -r '.command // ""')
[[ "$command" == *".env"* ]] && touched_env=true
;;
esac
if [ "$touched_env" = true ]; then
chmod 600 "$ENV_FILE"
for bak in "${CLAUDE_PLUGIN_ROOT}/backups"/.env.bak.*; do
[ -f "$bak" ] && chmod 600 "$bak"
done
fi

View file

@ -1,58 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
ENV_FILE="${CLAUDE_PLUGIN_ROOT}/.env"
BACKUP_DIR="${CLAUDE_PLUGIN_ROOT}/backups"
LOCK_FILE="${CLAUDE_PLUGIN_ROOT}/.sync-env.lock"
mkdir -p "$BACKUP_DIR"
# Serialize concurrent sessions (two tabs starting at the same time)
exec 9>"$LOCK_FILE"
flock -w 10 9 || { echo "sync-env: failed to acquire lock after 10s" >&2; exit 1; }
declare -A MANAGED=(
[UNRAID_API_URL]="${CLAUDE_PLUGIN_OPTION_UNRAID_API_URL:-}"
[UNRAID_API_KEY]="${CLAUDE_PLUGIN_OPTION_UNRAID_API_KEY:-}"
[UNRAID_MCP_URL]="${CLAUDE_PLUGIN_OPTION_UNRAID_MCP_URL:-}"
[UNRAID_MCP_BEARER_TOKEN]="${CLAUDE_PLUGIN_OPTION_UNRAID_MCP_TOKEN:-}"
)
touch "$ENV_FILE"
chmod 600 "$ENV_FILE"
# Backup before writing (max 3 retained)
if [ -s "$ENV_FILE" ]; then
cp "$ENV_FILE" "${BACKUP_DIR}/.env.bak.$(date +%s)"
fi
# Write managed keys — awk handles arbitrary values safely (no delimiter injection)
for key in "${!MANAGED[@]}"; do
value="${MANAGED[$key]}"
[ -z "$value" ] && continue
if [[ "$value" == *$'\n'* || "$value" == *$'\r'* || "$value" == *$'\t'* ]]; then
echo "sync-env: refusing ${key} with control characters" >&2
exit 1
fi
if grep -q "^${key}=" "$ENV_FILE" 2>/dev/null; then
awk -v k="$key" -v v="$value" '$0 ~ "^"k"=" { print k"="v; next } { print }' \
"$ENV_FILE" > "${ENV_FILE}.tmp" && mv "${ENV_FILE}.tmp" "$ENV_FILE"
else
echo "${key}=${value}" >> "$ENV_FILE"
fi
done
# Fail if bearer token is not set — do NOT auto-generate.
# Auto-generated tokens cause a mismatch: the server reads the generated token
# but Claude Code sends the (empty) userConfig value. Every MCP call returns 401.
if ! grep -qE "^UNRAID_MCP_BEARER_TOKEN=.+" "$ENV_FILE" 2>/dev/null; then
echo "sync-env: ERROR — UNRAID_MCP_BEARER_TOKEN is not set." >&2
echo " Generate one: openssl rand -hex 32" >&2
echo " Then paste it into the plugin's userConfig MCP token field." >&2
exit 1
fi
chmod 600 "$ENV_FILE"
mapfile -t baks < <(ls -t "${BACKUP_DIR}"/.env.bak.* 2>/dev/null)
for bak in "${baks[@]}"; do chmod 600 "$bak"; done
for bak in "${baks[@]:3}"; do rm -f "$bak"; done

18
lefthook.yml Normal file
View file

@ -0,0 +1,18 @@
pre-commit:
parallel: true
commands:
diff_check:
run: git diff --check --cached
yaml:
glob: "*.{yml,yaml}"
run: uv run python -c 'import sys, yaml; [yaml.safe_load(open(path, "r", encoding="utf-8")) for path in sys.argv[1:]]' {staged_files}
lint:
run: just lint
format:
run: just fmt
typecheck:
run: just typecheck
skills:
run: just validate-skills
env_guard:
run: bash bin/block-env-commits.sh

View file

@ -10,7 +10,7 @@ build-backend = "hatchling.build"
# ============================================================================
[project]
name = "unraid-mcp"
version = "1.3.6"
version = "1.3.8"
description = "MCP Server for Unraid API - provides tools to interact with an Unraid server's GraphQL API"
readme = "README.md"
license = {file = "LICENSE"}

View file

@ -1,145 +0,0 @@
#!/usr/bin/env bash
# check-docker-security.sh — Verify Dockerfile follows plugin security conventions
# Run standalone: bash scripts/check-docker-security.sh [path/to/Dockerfile]
# Run in pre-commit: add as a hook (see .pre-commit-config.yaml example in plugin-setup-guide)
#
# Checks:
# 1. Multi-stage build (separate builder + runtime stages)
# 2. Non-root user (USER 1000:1000 or ${PUID}:${PGID})
# 3. No sensitive ENV directives baked into the image
# 4. HEALTHCHECK present
set -euo pipefail
PASS=0
FAIL=0
WARN=0
pass() { echo " ✓ PASS: $1"; PASS=$((PASS + 1)); }
fail() { echo " ✗ FAIL: $1$2"; FAIL=$((FAIL + 1)); }
warn() { echo " ⚠ WARN: $1$2"; WARN=$((WARN + 1)); }
# Find Dockerfile
DOCKERFILE="${1:-Dockerfile}"
if [[ ! -f "$DOCKERFILE" ]]; then
echo "Error: $DOCKERFILE not found" >&2
exit 1
fi
echo "=== Docker Security Check: $DOCKERFILE ==="
# ── 1. Multi-stage build ─────────────────────────────────────────────────────
FROM_COUNT=$(grep -cE '^FROM\s' "$DOCKERFILE" || true)
if [[ "$FROM_COUNT" -ge 2 ]]; then
pass "Multi-stage build ($FROM_COUNT stages)"
else
fail "Multi-stage build" "Found $FROM_COUNT FROM directive(s) — need at least 2 (builder + runtime)"
fi
# Check for named stages
if grep -qE '^FROM\s.+\sAS\s+builder' "$DOCKERFILE"; then
pass "Named builder stage"
else
warn "Named builder stage" "No 'FROM ... AS builder' found — recommend naming stages"
fi
if grep -qE '^FROM\s.+\sAS\s+runtime' "$DOCKERFILE"; then
pass "Named runtime stage"
else
warn "Named runtime stage" "No 'FROM ... AS runtime' found — recommend naming stages"
fi
# ── 2. Non-root user ─────────────────────────────────────────────────────────
# Check for USER directive
if grep -qE '^USER\s' "$DOCKERFILE"; then
USER_LINE=$(grep -E '^USER\s' "$DOCKERFILE" | tail -1)
USER_VALUE=$(echo "$USER_LINE" | sed 's/^USER\s*//')
# Check for 1000:1000 or variable-based UID:GID
if echo "$USER_VALUE" | grep -qE '^\$?\{?PUID|1000:1000|1000$'; then
pass "Non-root user ($USER_VALUE)"
else
warn "Non-root user" "USER is '$USER_VALUE' — expected 1000:1000 or \${PUID}:\${PGID}"
fi
else
# Check if docker-compose.yaml handles it via user: directive
if [[ -f "docker-compose.yaml" ]] && grep -qE '^\s+user:' docker-compose.yaml; then
warn "Non-root user" "No USER in Dockerfile but docker-compose.yaml sets user: — acceptable if always run via compose"
else
fail "Non-root user" "No USER directive found — container runs as root"
fi
fi
# Check there's no USER root after the runtime stage
RUNTIME_START=$(grep -nE '^FROM\s.+\sAS\s+runtime' "$DOCKERFILE" | head -1 | cut -d: -f1 || true)
if [[ -n "$RUNTIME_START" ]]; then
if tail -n +"$RUNTIME_START" "$DOCKERFILE" | grep -qE '^USER\s+root'; then
fail "No root in runtime" "USER root found after runtime stage — never run as root in production"
else
pass "No root in runtime stage"
fi
fi
# ── 3. No sensitive ENV baked in ──────────────────────────────────────────────
SENSITIVE_PATTERNS='(API_KEY|TOKEN|SECRET|PASSWORD|CREDENTIAL|PRIVATE_KEY|AUTH)'
BAKED_ENVS=$(grep -nE "^ENV\s+.*${SENSITIVE_PATTERNS}" "$DOCKERFILE" || true)
if [[ -n "$BAKED_ENVS" ]]; then
fail "No baked secrets" "Sensitive ENV directives found in Dockerfile:"
echo "$BAKED_ENVS" | while IFS= read -r line; do
echo " $line"
done
else
pass "No baked secrets in ENV directives"
fi
# Check for ARG with defaults that look like secrets
BAKED_ARGS=$(grep -nE "^ARG\s+.*${SENSITIVE_PATTERNS}.*=" "$DOCKERFILE" || true)
if [[ -n "$BAKED_ARGS" ]]; then
warn "No baked ARG secrets" "ARG with sensitive defaults found (may leak via docker history):"
echo "$BAKED_ARGS" | while IFS= read -r line; do
echo " $line"
done
else
pass "No baked secrets in ARG defaults"
fi
# ── 4. HEALTHCHECK ────────────────────────────────────────────────────────────
if grep -qE '^HEALTHCHECK\s' "$DOCKERFILE"; then
pass "HEALTHCHECK directive present"
if grep -qE '/health' "$DOCKERFILE"; then
pass "HEALTHCHECK uses /health endpoint"
else
warn "HEALTHCHECK endpoint" "HEALTHCHECK doesn't reference /health — ensure it matches your health endpoint"
fi
else
warn "HEALTHCHECK" "No HEALTHCHECK in Dockerfile — relying on docker-compose healthcheck only"
fi
# ── 5. Dependency layer caching ───────────────────────────────────────────────
# Check that manifest files are copied before source (for layer caching)
COPY_LINES=$(grep -nE '^COPY\s' "$DOCKERFILE" || true)
FIRST_MANIFEST_COPY=""
FIRST_SOURCE_COPY=""
while IFS= read -r line; do
linenum=$(echo "$line" | cut -d: -f1)
content=$(echo "$line" | cut -d: -f2-)
if echo "$content" | grep -qE '(pyproject\.toml|package.*\.json|Cargo\.(toml|lock)|go\.(mod|sum)|uv\.lock)'; then
[[ -z "$FIRST_MANIFEST_COPY" ]] && FIRST_MANIFEST_COPY="$linenum"
elif echo "$content" | grep -qE '\.\s+\.|src/|lib/'; then
[[ -z "$FIRST_SOURCE_COPY" ]] && FIRST_SOURCE_COPY="$linenum"
fi
done <<< "$COPY_LINES"
if [[ -n "$FIRST_MANIFEST_COPY" && -n "$FIRST_SOURCE_COPY" ]]; then
if [[ "$FIRST_MANIFEST_COPY" -lt "$FIRST_SOURCE_COPY" ]]; then
pass "Dependency manifest copied before source (layer caching)"
else
warn "Layer caching" "Source copied before dependency manifest — swap order for better Docker layer caching"
fi
fi
# ── Summary ───────────────────────────────────────────────────────────────────
echo
echo "Results: $PASS passed, $FAIL failed, $WARN warnings"
[[ "$FAIL" -eq 0 ]] && echo "DOCKER SECURITY CHECK PASSED" && exit 0
echo "DOCKER SECURITY CHECK FAILED" && exit 1

View file

@ -1,138 +0,0 @@
#!/usr/bin/env bash
# check-no-baked-env.sh — Verify env vars aren't baked into Docker artifacts
# Run standalone: bash scripts/check-no-baked-env.sh [project-dir]
# Run in pre-commit: add as a hook (see .pre-commit-config.yaml example in plugin-setup-guide)
#
# Checks:
# 1. docker-compose.yaml has no `environment:` block (all config via env_file only)
# 2. Dockerfile has no ENV with real/sensitive values
# 3. No hardcoded URLs, tokens, or credentials in Dockerfile or docker-compose.yaml
set -euo pipefail
PROJECT_DIR="${1:-.}"
PASS=0
FAIL=0
WARN=0
pass() { echo " ✓ PASS: $1"; PASS=$((PASS + 1)); }
fail() { echo " ✗ FAIL: $1$2"; FAIL=$((FAIL + 1)); }
warn() { echo " ⚠ WARN: $1$2"; WARN=$((WARN + 1)); }
echo "=== No Baked Env Vars Check: $PROJECT_DIR ==="
# ── 1. docker-compose.yaml — no environment: block ───────────────────────────
COMPOSE_FILE="$PROJECT_DIR/docker-compose.yaml"
if [[ -f "$COMPOSE_FILE" ]]; then
# Check for environment: key under services
if grep -qE '^\s+environment:' "$COMPOSE_FILE"; then
fail "No environment: block in docker-compose.yaml" \
"Found 'environment:' block — all env vars must come from env_file: .env only"
echo " Offending lines:"
grep -nE '^\s+environment:|^\s+-\s+\w+=' "$COMPOSE_FILE" | head -10 | while IFS= read -r line; do
echo " $line"
done
echo
echo " Fix: Remove the environment: block entirely."
echo " Add all variables to .env and .env.example instead."
echo " docker-compose.yaml should only use 'env_file: .env'"
else
pass "No environment: block in docker-compose.yaml"
fi
# Verify env_file is present
if grep -qE '^\s+env_file:' "$COMPOSE_FILE"; then
pass "env_file: directive present"
else
fail "env_file: directive" "No env_file: found — services won't receive credentials"
fi
# Check for hardcoded values in compose environment blocks (not variable references)
# Filter: lines that set KEY=VALUE where VALUE doesn't start with $ (variable ref)
HARDCODED=$(grep -nE '^\s+-\s+\w+=[^$]' "$COMPOSE_FILE" | grep -vE '=(true|false)$' || true)
if [[ -n "$HARDCODED" ]]; then
# Filter out known safe patterns
SUSPICIOUS=$(echo "$HARDCODED" | grep -vE '(build:|image:|container_name:|restart:|test:|interval:|timeout:|retries:|start_period:|memory:|cpus:|name:)' || true)
if [[ -n "$SUSPICIOUS" ]]; then
warn "Hardcoded values in compose" "Found potentially hardcoded values:"
echo "$SUSPICIOUS" | head -5 | while IFS= read -r line; do
echo " $line"
done
fi
fi
else
warn "docker-compose.yaml" "File not found at $COMPOSE_FILE — skipping compose checks"
fi
# ── 2. Dockerfile — no sensitive ENV values ───────────────────────────────────
DOCKERFILE="$PROJECT_DIR/Dockerfile"
if [[ -f "$DOCKERFILE" ]]; then
# Sensitive patterns that should never be in ENV
SENSITIVE_RE='(API_KEY|TOKEN|SECRET|PASSWORD|CREDENTIAL|PRIVATE_KEY|AUTH_TOKEN|BEARER)'
# Check ENV directives for sensitive variable names with values
SENSITIVE_ENVS=$(grep -nE "^ENV\s+\S*${SENSITIVE_RE}\S*\s*=" "$DOCKERFILE" || true)
if [[ -n "$SENSITIVE_ENVS" ]]; then
fail "No sensitive ENV in Dockerfile" "Found ENV directives with sensitive variable names:"
echo "$SENSITIVE_ENVS" | while IFS= read -r line; do
echo " $line"
done
else
pass "No sensitive ENV in Dockerfile"
fi
# Check for ENV with hardcoded URLs (might contain credentials)
URL_ENVS=$(grep -nE '^ENV\s+\S+\s*=\s*https?://' "$DOCKERFILE" || true)
if [[ -n "$URL_ENVS" ]]; then
warn "Hardcoded URLs in ENV" "Found ENV with hardcoded URLs (may contain credentials):"
echo "$URL_ENVS" | while IFS= read -r line; do
echo " $line"
done
else
pass "No hardcoded URLs in ENV"
fi
# Check for COPY .env into image
if grep -qE '^COPY\s+.*\.env\s' "$DOCKERFILE"; then
fail "No .env in image" "Dockerfile copies .env into the image — credentials will be baked in"
else
pass "No .env copied into image"
fi
# Check .dockerignore excludes .env
DOCKERIGNORE="$PROJECT_DIR/.dockerignore"
if [[ -f "$DOCKERIGNORE" ]]; then
if grep -qE '^\s*\.env\s*$' "$DOCKERIGNORE"; then
pass ".dockerignore excludes .env"
else
fail ".dockerignore" ".env not excluded — secrets may leak into build context"
fi
else
warn ".dockerignore" "File not found — create one that excludes .env"
fi
else
warn "Dockerfile" "File not found at $DOCKERFILE — skipping Dockerfile checks"
fi
# ── 3. entrypoint.sh — no hardcoded credentials ──────────────────────────────
ENTRYPOINT="$PROJECT_DIR/entrypoint.sh"
if [[ -f "$ENTRYPOINT" ]]; then
CRED_PATTERNS='(password|secret|token|api.key)\s*=\s*["\x27][^$]'
HARDCODED_CREDS=$(grep -inE "$CRED_PATTERNS" "$ENTRYPOINT" || true)
if [[ -n "$HARDCODED_CREDS" ]]; then
fail "No hardcoded creds in entrypoint.sh" "Found suspicious hardcoded values:"
echo "$HARDCODED_CREDS" | while IFS= read -r line; do
echo " $line"
done
else
pass "No hardcoded credentials in entrypoint.sh"
fi
else
# entrypoint.sh is optional
true
fi
# ── Summary ───────────────────────────────────────────────────────────────────
echo
echo "Results: $PASS passed, $FAIL failed, $WARN warnings"
[[ "$FAIL" -eq 0 ]] && echo "NO BAKED ENV CHECK PASSED" && exit 0
echo "NO BAKED ENV CHECK FAILED" && exit 1

View file

@ -1,181 +0,0 @@
#!/usr/bin/env bash
# check-outdated-deps.sh — Report outdated dependencies for Python/TypeScript/Rust projects
# Run standalone: bash scripts/check-outdated-deps.sh [project-dir]
#
# Auto-detects language from manifest files and reports outdated packages.
# Exit code: 0 = all current, 1 = outdated found, 2 = tool error
#
# Not recommended for pre-commit (requires network, slow). Run periodically or in CI.
set -euo pipefail
PROJECT_DIR="${1:-.}"
FOUND_OUTDATED=0 # set to 1 when any ecosystem reports stale or vulnerable deps
CHECKED=0        # number of recognized manifests processed
echo "=== Outdated Dependencies Check: $PROJECT_DIR ==="
echo
# ── Python (uv) ──────────────────────────────────────────────────────────────
if [[ -f "$PROJECT_DIR/pyproject.toml" ]]; then
  CHECKED=$((CHECKED + 1))
  echo "── Python (uv) ──"
  if command -v uv &>/dev/null; then
    # Check if lock file is current
    if [[ -f "$PROJECT_DIR/uv.lock" ]]; then
      if (cd "$PROJECT_DIR" && uv lock --check 2>/dev/null); then
        echo " ✓ uv.lock is up to date"
      else
        echo " ⚠ uv.lock is out of sync with pyproject.toml — run 'uv lock'"
        FOUND_OUTDATED=1
      fi
    fi
    # Show outdated packages; `|| true` guards set -e on non-zero exits.
    echo " Checking for outdated packages..."
    OUTDATED=$(cd "$PROJECT_DIR" && uv pip list --outdated 2>/dev/null || true)
    if [[ -n "$OUTDATED" && "$OUTDATED" != *"No outdated packages"* ]]; then
      LINE_COUNT=$(echo "$OUTDATED" | wc -l)
      if [[ "$LINE_COUNT" -gt 2 ]]; then # Header lines
        echo "$OUTDATED" | head -20
        FOUND_OUTDATED=1
      else
        echo " ✓ All Python packages are current"
      fi
    else
      echo " ✓ All Python packages are current"
    fi
    # Check pyproject.toml for pinned versions that may be outdated.
    # BUGFIX: grep -E is POSIX ERE, which has no \d or \s escapes (those are
    # PCRE). The old pattern '^\s*"[^"]+==\d' matched a literal "d" and so
    # never found a real pin. Use POSIX character classes instead.
    echo " Checking pyproject.toml dependency pins..."
    PINNED=$(grep -E '^[[:space:]]*"[^"]+==[0-9]' "$PROJECT_DIR/pyproject.toml" 2>/dev/null || true)
    if [[ -n "$PINNED" ]]; then
      echo " ⚠ Found exact-pinned dependencies (consider using >= or ~=):"
      echo "$PINNED" | head -10 | while IFS= read -r line; do
        echo " $line"
      done
    fi
  else
    echo " ⚠ uv not found — install with: curl -LsSf https://astral.sh/uv/install.sh | sh"
  fi
  echo
fi
# ── TypeScript / JavaScript (npm) ────────────────────────────────────────────
if [[ -f "$PROJECT_DIR/package.json" ]]; then
  CHECKED=$((CHECKED + 1))
  echo "── TypeScript / JavaScript ──"
  if command -v npm &>/dev/null; then
    echo " Checking for outdated packages..."
    # `npm outdated` exits non-zero when anything is outdated; `|| true`
    # keeps set -e from aborting on that expected case.
    OUTDATED=$(cd "$PROJECT_DIR" && npm outdated --json 2>/dev/null || true)
    if [[ -n "$OUTDATED" && "$OUTDATED" != "{}" ]]; then
      # Parse JSON output for readable display
      echo "$OUTDATED" | python3 -c "
import json, sys
try:
    data = json.load(sys.stdin)
    if data:
        print(f' Found {len(data)} outdated package(s):')
        print(f' {\"Package\":<30} {\"Current\":<15} {\"Wanted\":<15} {\"Latest\":<15}')
        print(f' {\"─\"*30} {\"─\"*15} {\"─\"*15} {\"─\"*15}')
        for pkg, info in sorted(data.items()):
            current = info.get('current', '?')
            wanted = info.get('wanted', '?')
            latest = info.get('latest', '?')
            marker = ' ← MAJOR' if current.split('.')[0] != latest.split('.')[0] else ''
            print(f' {pkg:<30} {current:<15} {wanted:<15} {latest:<15}{marker}')
except (json.JSONDecodeError, KeyError):
    print(' ⚠ Could not parse npm outdated output')
" 2>/dev/null || echo " ⚠ Could not parse npm outdated output"
      FOUND_OUTDATED=1
    else
      echo " ✓ All npm packages are current"
    fi
    # Check for npm audit vulnerabilities (everything except "info" severity).
    echo " Checking for known vulnerabilities..."
    AUDIT=$(cd "$PROJECT_DIR" && npm audit --json 2>/dev/null || true)
    VULN_COUNT=$(echo "$AUDIT" | python3 -c "
import json, sys
try:
    data = json.load(sys.stdin)
    total = data.get('metadata', {}).get('vulnerabilities', {})
    count = sum(v for k, v in total.items() if k != 'info')
    print(count)
except:
    print(0)
" 2>/dev/null || echo "0")
    if [[ "$VULN_COUNT" -gt 0 ]]; then
      echo " ⚠ Found $VULN_COUNT known vulnerabilities — run 'npm audit' for details"
    else
      echo " ✓ No known vulnerabilities"
    fi
  else
    echo " ⚠ npm not found"
  fi
  echo
fi
# ── Rust (cargo) ──────────────────────────────────────────────────────────────
if [[ -f "$PROJECT_DIR/Cargo.toml" ]]; then
  CHECKED=$((CHECKED + 1))
  echo "── Rust (cargo) ──"
  if command -v cargo &>/dev/null; then
    # Check if the cargo-outdated subcommand is installed (the `2>&1` after
    # `&>` in the old version was redundant — `&>` already captures stderr).
    if cargo outdated --version &>/dev/null; then
      echo " Checking for outdated crates..."
      OUTDATED=$(cd "$PROJECT_DIR" && cargo outdated --root-deps-only 2>/dev/null || true)
      if echo "$OUTDATED" | grep -qE '^\w'; then
        echo "$OUTDATED" | head -20
        FOUND_OUTDATED=1
      else
        echo " ✓ All Rust crates are current"
      fi
    else
      echo " ⚠ cargo-outdated not installed — install with: cargo install cargo-outdated"
      echo " Falling back to Cargo.lock age check..."
      if [[ -f "$PROJECT_DIR/Cargo.lock" ]]; then
        # PORTABILITY: GNU stat uses `-c %Y`, BSD/macOS stat uses `-f %m`.
        # Try GNU first and fall back so this works on both platforms.
        LOCK_MTIME=$(stat -c %Y "$PROJECT_DIR/Cargo.lock" 2>/dev/null \
          || stat -f %m "$PROJECT_DIR/Cargo.lock")
        LOCK_AGE_DAYS=$(( ($(date +%s) - LOCK_MTIME) / 86400 ))
        if [[ "$LOCK_AGE_DAYS" -gt 30 ]]; then
          echo " ⚠ Cargo.lock is $LOCK_AGE_DAYS days old — consider running 'cargo update'"
        else
          echo " ✓ Cargo.lock updated within last 30 days ($LOCK_AGE_DAYS days ago)"
        fi
      fi
    fi
    # Check for cargo audit (known-vulnerability database scan)
    if cargo audit --version &>/dev/null; then
      echo " Checking for known vulnerabilities..."
      if (cd "$PROJECT_DIR" && cargo audit --quiet 2>/dev/null); then
        echo " ✓ No known vulnerabilities"
      else
        echo " ⚠ Vulnerabilities found — run 'cargo audit' for details"
        FOUND_OUTDATED=1
      fi
    else
      echo " ⚠ cargo-audit not installed — install with: cargo install cargo-audit"
    fi
  else
    echo " ⚠ cargo not found"
  fi
  echo
fi
# ── Summary ───────────────────────────────────────────────────────────────────
# Exit 2 when nothing was recognized, 1 when something is stale, 0 otherwise.
if (( CHECKED == 0 )); then
  echo "No recognized project manifests found (pyproject.toml, package.json, Cargo.toml)"
  exit 2
fi
echo "=== Summary ==="
if (( FOUND_OUTDATED == 0 )); then
  echo "All dependencies are current across $CHECKED project(s)."
  exit 0
fi
echo "Outdated dependencies found. Review above and update as needed."
exit 1

View file

@ -1,267 +0,0 @@
#!/usr/bin/env bash
# ensure-ignore-files.sh — Ensure .gitignore and .dockerignore have all required patterns
#
# Modes:
# (default) Append missing patterns to the files (SessionStart hook)
# --check Report missing patterns and exit non-zero if any are missing (pre-commit/CI)
#
# Usage:
# bash scripts/ensure-ignore-files.sh [--check] [project-dir]
#
# As a plugin hook:
# "command": "${CLAUDE_PLUGIN_ROOT}/hooks/scripts/ensure-ignore-files.sh"
set -euo pipefail
CHECK_MODE=false
if [[ "${1:-}" == "--check" ]]; then
  CHECK_MODE=true
  shift
fi
PROJECT_DIR="${1:-${CLAUDE_PLUGIN_ROOT:-.}}"
# Result counters reported by the summary at the bottom of the script.
PASS=0
FAIL=0
WARN=0
# pass/fail/warn take a label ($1) and, for fail/warn, a detail message ($2).
# pass/warn only print in --check mode; fail always prints.
# BUGFIX: the previous versions echoed "$1$2" with no separator, which ran
# label and detail together (e.g. ".gitignore: '.env'missing"). Join with " — ".
pass() { PASS=$((PASS + 1)); if $CHECK_MODE; then echo "$1"; fi; }
fail() { FAIL=$((FAIL + 1)); echo " ✗ FAIL: $1 — $2"; }
warn() { WARN=$((WARN + 1)); if $CHECK_MODE; then echo " ⚠ WARN: $1 — $2"; fi; }
# ensure_pattern FILE PATTERN LABEL
# Verifies FILE contains PATTERN as an exact whole line (fixed-string match).
# Present → pass. Absent in --check mode → fail. Absent in append mode →
# append the line to FILE and record a pass annotated "(added)".
ensure_pattern() {
  local target="$1" want="$2" tag="$3"
  if grep -qxF "$want" "$target" 2>/dev/null; then
    pass "$tag: '$want'"
    return 0
  fi
  if $CHECK_MODE; then
    fail "$tag: '$want'" "missing"
  else
    echo "$want" >> "$target"
    pass "$tag: '$want' (added)"
  fi
}
# ═══════════════════════════════════════════════════════════════════════════════
# .gitignore — full required pattern list from plugin-setup-guide
# ═══════════════════════════════════════════════════════════════════════════════
GITIGNORE="$PROJECT_DIR/.gitignore"
if $CHECK_MODE; then echo "=== Ignore Files Check: $PROJECT_DIR ==="; echo "── .gitignore ──"; fi
# In --check mode a missing file is a failure; in append mode it is created.
if [[ ! -f "$GITIGNORE" ]] && $CHECK_MODE; then
  fail ".gitignore" "File not found — every plugin repo must have a .gitignore"
else
  touch "$GITIGNORE"
  # ── Secrets ──
  REQUIRED_GIT=(
    ".env"
    ".env.*"
    "!.env.example"
  )
  # ── Runtime / hook artifacts ──
  REQUIRED_GIT+=(
    "*.log"
  )
  # ── Claude Code / AI tooling ──
  REQUIRED_GIT+=(
    ".claude/settings.local.json"
    ".claude/worktrees/"
    ".omc/"
    ".lavra/"
    ".beads/"
    ".serena/"
    ".worktrees"
    ".full-review/"
    ".full-review-archive-*"
  )
  # ── IDE / editor ──
  REQUIRED_GIT+=(
    ".vscode/"
    ".cursor/"
    ".windsurf/"
    ".1code/"
  )
  # ── Caches ──
  REQUIRED_GIT+=(
    ".cache/"
  )
  # ── Documentation artifacts ──
  REQUIRED_GIT+=(
    "docs/plans/"
    "docs/sessions/"
    "docs/reports/"
    "docs/research/"
    "docs/superpowers/"
  )
  for pattern in "${REQUIRED_GIT[@]}"; do
    ensure_pattern "$GITIGNORE" "$pattern" ".gitignore"
  done
  # ── Language-specific (check only, don't auto-add — user must uncomment) ──
  if $CHECK_MODE; then
    if [[ -f "$PROJECT_DIR/pyproject.toml" ]]; then
      echo " Detected: Python project"
      for p in ".venv/" "__pycache__/" "*.py[oc]" "*.egg-info/" "dist/" "build/"; do
        if grep -qxF "$p" "$GITIGNORE" 2>/dev/null; then
          pass ".gitignore (Python): '$p'"
        else
          warn ".gitignore (Python)" "'$p' not found — uncomment Python section"
        fi
      done
    fi
    if [[ -f "$PROJECT_DIR/package.json" ]]; then
      echo " Detected: TypeScript/JavaScript project"
      for p in "node_modules/" "dist/" "build/"; do
        if grep -qxF "$p" "$GITIGNORE" 2>/dev/null; then
          pass ".gitignore (TypeScript): '$p'"
        else
          warn ".gitignore (TypeScript)" "'$p' not found — uncomment TS section"
        fi
      done
    fi
    if [[ -f "$PROJECT_DIR/Cargo.toml" ]]; then
      echo " Detected: Rust project"
      for p in "target/"; do
        if grep -qxF "$p" "$GITIGNORE" 2>/dev/null; then
          pass ".gitignore (Rust): '$p'"
        else
          warn ".gitignore (Rust)" "'$p' not found — uncomment Rust section"
        fi
      done
    fi
    # Verify .env.example is NOT ignored — the '!.env.example' negation must
    # appear after '.env.*' for git to honor it.
    if git -C "$PROJECT_DIR" check-ignore .env.example > /dev/null 2>&1; then
      fail ".gitignore" ".env.example is being ignored — '!.env.example' must come after '.env.*'"
    else
      pass ".gitignore: .env.example is tracked (not ignored)"
    fi
  fi
fi
# ═══════════════════════════════════════════════════════════════════════════════
# .dockerignore — full required pattern list from plugin-setup-guide
# ═══════════════════════════════════════════════════════════════════════════════
DOCKERIGNORE="$PROJECT_DIR/.dockerignore"
# Skip if no Dockerfile — a .dockerignore is only required alongside one.
if [[ ! -f "$PROJECT_DIR/Dockerfile" ]]; then
  if $CHECK_MODE; then echo; echo "── .dockerignore ──"; echo " No Dockerfile found — skipping"; fi
else
  if $CHECK_MODE; then echo; echo "── .dockerignore ──"; fi
  if [[ ! -f "$DOCKERIGNORE" ]] && $CHECK_MODE; then
    fail ".dockerignore" "File not found — required when Dockerfile exists"
  else
    touch "$DOCKERIGNORE"
    # ── Version control ──
    REQUIRED_DOCKER=(
      ".git"
      ".github"
    )
    # ── Secrets ──
    REQUIRED_DOCKER+=(
      ".env"
      ".env.*"
      "!.env.example"
    )
    # ── Claude Code / AI tooling ──
    REQUIRED_DOCKER+=(
      ".claude"
      ".claude-plugin"
      ".codex-plugin"
      ".omc"
      ".lavra"
      ".beads"
      ".serena"
      ".worktrees"
      ".full-review"
      ".full-review-archive-*"
    )
    # ── IDE / editor ──
    REQUIRED_DOCKER+=(
      ".vscode"
      ".cursor"
      ".windsurf"
      ".1code"
    )
    # ── Docs, tests, scripts — not needed at runtime ──
    REQUIRED_DOCKER+=(
      "docs"
      "tests"
      "scripts"
      "*.md"
      "!README.md"
    )
    # ── Runtime artifacts ──
    REQUIRED_DOCKER+=(
      "logs"
      "backups"
      "*.log"
      ".cache"
    )
    for pattern in "${REQUIRED_DOCKER[@]}"; do
      ensure_pattern "$DOCKERIGNORE" "$pattern" ".dockerignore"
    done
    # ── Language-specific (check only) ──
    if $CHECK_MODE; then
      if [[ -f "$PROJECT_DIR/pyproject.toml" ]]; then
        for p in ".venv" "__pycache__/" "*.py[oc]" "*.egg-info" "dist/"; do
          if grep -qxF "$p" "$DOCKERIGNORE" 2>/dev/null; then
            pass ".dockerignore (Python): '$p'"
          else
            warn ".dockerignore (Python)" "'$p' not found — uncomment Python section"
          fi
        done
      fi
      if [[ -f "$PROJECT_DIR/package.json" ]]; then
        for p in "node_modules/" "dist/" "coverage/"; do
          if grep -qxF "$p" "$DOCKERIGNORE" 2>/dev/null; then
            pass ".dockerignore (TypeScript): '$p'"
          else
            warn ".dockerignore (TypeScript)" "'$p' not found — uncomment TS section"
          fi
        done
      fi
      if [[ -f "$PROJECT_DIR/Cargo.toml" ]]; then
        for p in "target/"; do
          if grep -qxF "$p" "$DOCKERIGNORE" 2>/dev/null; then
            pass ".dockerignore (Rust): '$p'"
          else
            warn ".dockerignore (Rust)" "'$p' not found — uncomment Rust section"
          fi
        done
      fi
    fi
  fi
fi
# ═══════════════════════════════════════════════════════════════════════════════
# Summary
# ═══════════════════════════════════════════════════════════════════════════════
# Only --check mode reports results and sets an exit status; append mode
# finishes quietly so it is safe as a session hook.
if $CHECK_MODE; then
  echo
  echo "Results: $PASS passed, $FAIL failed, $WARN warnings"
  if [[ "$FAIL" -eq 0 ]]; then
    echo "IGNORE FILES CHECK PASSED"
    exit 0
  fi
  echo "IGNORE FILES CHECK FAILED"
  exit 1
fi

View file

@ -1,860 +0,0 @@
#!/usr/bin/env python3
"""Generate canonical Unraid GraphQL docs from live introspection."""
from __future__ import annotations

import argparse
import datetime as dt
import json
import os
from collections import Counter, defaultdict
from pathlib import Path
from typing import Any

import httpx
from graphql import build_client_schema, print_schema

# Canonical output locations for the generated documentation artifacts.
DOCS_DIR = Path("docs/unraid")
DEFAULT_COMPLETE_OUTPUT = DOCS_DIR / "UNRAID-API-COMPLETE-REFERENCE.md"
DEFAULT_SUMMARY_OUTPUT = DOCS_DIR / "UNRAID-API-SUMMARY.md"
DEFAULT_INTROSPECTION_OUTPUT = DOCS_DIR / "UNRAID-API-INTROSPECTION.json"
DEFAULT_SCHEMA_OUTPUT = DOCS_DIR / "UNRAID-SCHEMA.graphql"
DEFAULT_CHANGES_OUTPUT = DOCS_DIR / "UNRAID-API-CHANGES.md"
# Pre-reorganization snapshot path, kept as a diffing fallback.
LEGACY_INTROSPECTION_OUTPUT = Path("docs/unraid-api-introspection.json")
INTROSPECTION_QUERY = """
query FullIntrospection {
__schema {
queryType { name }
mutationType { name }
subscriptionType { name }
directives {
name
description
locations
args {
name
description
defaultValue
type { ...TypeRef }
}
}
types {
kind
name
description
fields(includeDeprecated: true) {
name
description
isDeprecated
deprecationReason
args {
name
description
defaultValue
type { ...TypeRef }
}
type { ...TypeRef }
}
inputFields {
name
description
defaultValue
type { ...TypeRef }
}
interfaces { kind name }
enumValues(includeDeprecated: true) {
name
description
isDeprecated
deprecationReason
}
possibleTypes { kind name }
}
}
}
fragment TypeRef on __Type {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
}
}
}
}
}
}
}
}
"""
def _clean(text: str | None) -> str:
"""Collapse multiline description text into a single line."""
if not text:
return ""
return " ".join(text.split())
def _type_to_str(type_ref: dict[str, Any] | None) -> str:
"""Render GraphQL nested type refs to SDL-like notation."""
if not type_ref:
return "Unknown"
kind = type_ref.get("kind")
if kind == "NON_NULL":
return f"{_type_to_str(type_ref.get('ofType'))}!"
if kind == "LIST":
return f"[{_type_to_str(type_ref.get('ofType'))}]"
return str(type_ref.get("name") or kind or "Unknown")
def _field_lines(field: dict[str, Any], *, is_input: bool) -> list[str]:
    """Render one field (or input field) as nested markdown bullet lines.

    Output fields additionally list their arguments; input fields skip the
    argument section. Deprecation is always noted last when present.
    """
    rendered = [f"- `{field['name']}`: `{_type_to_str(field.get('type'))}`"]
    desc = _clean(field.get("description"))
    if desc:
        rendered.append(f" - {desc}")
    default = field.get("defaultValue")
    if default is not None:
        rendered.append(f" - Default: `{default}`")
    if not is_input:
        arguments = sorted(field.get("args") or [], key=lambda item: str(item["name"]))
        if arguments:
            rendered.append(" - Arguments:")
            for arg in arguments:
                entry = f" - `{arg['name']}`: `{_type_to_str(arg.get('type'))}`"
                if arg.get("defaultValue") is not None:
                    entry += f" (default: `{arg['defaultValue']}`)"
                rendered.append(entry)
                arg_desc = _clean(arg.get("description"))
                if arg_desc:
                    rendered.append(f" - {arg_desc}")
    if field.get("isDeprecated"):
        reason = _clean(field.get("deprecationReason"))
        rendered.append(f" - Deprecated: {reason}" if reason else " - Deprecated")
    return rendered
def _build_markdown(
    schema: dict[str, Any],
    *,
    include_introspection: bool,
    source: str,
    generated_at: str,
) -> str:
    """Build the full Markdown schema reference.

    Args:
        schema: Raw ``__schema`` payload from introspection.
        include_introspection: Keep ``__``-prefixed meta types when True.
        source: Human-readable origin (endpoint) recorded in the header.
        generated_at: Timestamp string recorded in the header.

    Returns:
        The complete Markdown document, ending with a single newline.
    """
    all_types = schema.get("types") or []
    types = [
        item
        for item in all_types
        if item.get("name") and (include_introspection or not str(item["name"]).startswith("__"))
    ]
    types_by_name = {str(item["name"]): item for item in types}
    kind_counts = Counter(str(item.get("kind", "UNKNOWN")) for item in types)
    directives = sorted(schema.get("directives") or [], key=lambda item: str(item["name"]))
    # Reverse map: interface name -> names of object types implementing it.
    implements_map: dict[str, list[str]] = defaultdict(list)
    for item in types:
        for interface in item.get("interfaces") or []:
            interface_name = interface.get("name")
            if interface_name:
                implements_map[str(interface_name)].append(str(item["name"]))
    query_root = (schema.get("queryType") or {}).get("name")
    mutation_root = (schema.get("mutationType") or {}).get("name")
    subscription_root = (schema.get("subscriptionType") or {}).get("name")
    lines: list[str] = []
    lines.append("# Unraid GraphQL API Complete Schema Reference")
    lines.append("")
    lines.append(f"> Generated from live GraphQL introspection on {generated_at}")
    lines.append(f"> Source: {source}")
    lines.append("")
    lines.append("This is permission-scoped: it contains everything visible to the API key used.")
    lines.append("")
    lines.append("## Table of Contents")
    lines.append("- [Schema Summary](#schema-summary)")
    lines.append("- [Root Operations](#root-operations)")
    lines.append("- [Directives](#directives)")
    lines.append("- [All Types (Alphabetical)](#all-types-alphabetical)")
    lines.append("")
    lines.append("## Schema Summary")
    lines.append(f"- Query root: `{query_root}`")
    lines.append(f"- Mutation root: `{mutation_root}`")
    lines.append(f"- Subscription root: `{subscription_root}`")
    lines.append(f"- Total types: **{len(types)}**")
    lines.append(f"- Total directives: **{len(directives)}**")
    lines.append("- Type kinds:")
    lines.extend(f"- `{kind}`: {kind_counts[kind]}" for kind in sorted(kind_counts))
    lines.append("")
    # Closure over `lines`/`types_by_name`: emits one root type's fields.
    def render_root(root_name: str | None, label: str) -> None:
        lines.append(f"### {label}")
        if not root_name or root_name not in types_by_name:
            lines.append("Not exposed.")
            lines.append("")
            return
        root_type = types_by_name[root_name]
        fields = sorted(root_type.get("fields") or [], key=lambda item: str(item["name"]))
        lines.append(f"Total fields: **{len(fields)}**")
        lines.append("")
        for field in fields:
            args = sorted(field.get("args") or [], key=lambda item: str(item["name"]))
            arg_signature: list[str] = []
            for arg in args:
                part = f"{arg['name']}: {_type_to_str(arg.get('type'))}"
                if arg.get("defaultValue") is not None:
                    part += f" = {arg['defaultValue']}"
                arg_signature.append(part)
            signature = (
                f"{field['name']}({', '.join(arg_signature)})"
                if arg_signature
                else f"{field['name']}()"
            )
            lines.append(f"- `{signature}: {_type_to_str(field.get('type'))}`")
            description = _clean(field.get("description"))
            if description:
                lines.append(f" - {description}")
            if field.get("isDeprecated"):
                reason = _clean(field.get("deprecationReason"))
                lines.append(f" - Deprecated: {reason}" if reason else " - Deprecated")
        lines.append("")
    lines.append("## Root Operations")
    render_root(query_root, "Queries")
    render_root(mutation_root, "Mutations")
    render_root(subscription_root, "Subscriptions")
    lines.append("## Directives")
    if not directives:
        lines.append("No directives exposed.")
        lines.append("")
    else:
        for directive in directives:
            lines.append(f"### `@{directive['name']}`")
            description = _clean(directive.get("description"))
            if description:
                lines.append(description)
            lines.append("")
            locations = directive.get("locations") or []
            lines.append(
                f"- Locations: {', '.join(f'`{item}`' for item in locations) if locations else 'None'}"
            )
            args = sorted(directive.get("args") or [], key=lambda item: str(item["name"]))
            if args:
                lines.append("- Arguments:")
                for arg in args:
                    line = f" - `{arg['name']}`: `{_type_to_str(arg.get('type'))}`"
                    if arg.get("defaultValue") is not None:
                        line += f" (default: `{arg['defaultValue']}`)"
                    lines.append(line)
                    arg_description = _clean(arg.get("description"))
                    if arg_description:
                        lines.append(f" - {arg_description}")
            lines.append("")
    lines.append("## All Types (Alphabetical)")
    # Each kind (OBJECT/INPUT_OBJECT/ENUM/INTERFACE/UNION/SCALAR) gets its
    # own rendering branch below.
    for item in sorted(types, key=lambda row: str(row["name"])):
        name = str(item["name"])
        kind = str(item["kind"])
        lines.append(f"### `{name}` ({kind})")
        description = _clean(item.get("description"))
        if description:
            lines.append(description)
        lines.append("")
        if kind == "OBJECT":
            interfaces = sorted(
                str(interface["name"])
                for interface in (item.get("interfaces") or [])
                if interface.get("name")
            )
            if interfaces:
                lines.append(f"- Implements: {', '.join(f'`{value}`' for value in interfaces)}")
            fields = sorted(item.get("fields") or [], key=lambda row: str(row["name"]))
            lines.append(f"- Fields ({len(fields)}):")
            if fields:
                for field in fields:
                    lines.extend(_field_lines(field, is_input=False))
            else:
                lines.append("- None")
        elif kind == "INPUT_OBJECT":
            fields = sorted(item.get("inputFields") or [], key=lambda row: str(row["name"]))
            lines.append(f"- Input fields ({len(fields)}):")
            if fields:
                for field in fields:
                    lines.extend(_field_lines(field, is_input=True))
            else:
                lines.append("- None")
        elif kind == "ENUM":
            enum_values = sorted(item.get("enumValues") or [], key=lambda row: str(row["name"]))
            lines.append(f"- Enum values ({len(enum_values)}):")
            if enum_values:
                for enum_value in enum_values:
                    lines.append(f" - `{enum_value['name']}`")
                    enum_description = _clean(enum_value.get("description"))
                    if enum_description:
                        lines.append(f" - {enum_description}")
                    if enum_value.get("isDeprecated"):
                        reason = _clean(enum_value.get("deprecationReason"))
                        lines.append(
                            f" - Deprecated: {reason}" if reason else " - Deprecated"
                        )
            else:
                lines.append("- None")
        elif kind == "INTERFACE":
            fields = sorted(item.get("fields") or [], key=lambda row: str(row["name"]))
            lines.append(f"- Interface fields ({len(fields)}):")
            if fields:
                for field in fields:
                    lines.extend(_field_lines(field, is_input=False))
            else:
                lines.append("- None")
            implementers = sorted(implements_map.get(name, []))
            if implementers:
                lines.append(
                    f"- Implemented by ({len(implementers)}): "
                    + ", ".join(f"`{value}`" for value in implementers)
                )
            else:
                lines.append("- Implemented by (0): None")
        elif kind == "UNION":
            possible_types = sorted(
                str(possible["name"])
                for possible in (item.get("possibleTypes") or [])
                if possible.get("name")
            )
            if possible_types:
                lines.append(
                    f"- Possible types ({len(possible_types)}): "
                    + ", ".join(f"`{value}`" for value in possible_types)
                )
            else:
                lines.append("- Possible types (0): None")
        elif kind == "SCALAR":
            lines.append("- Scalar type")
        else:
            lines.append("- Unhandled type kind")
        lines.append("")
    return "\n".join(lines).rstrip() + "\n"
def _visible_types(
schema: dict[str, Any], *, include_introspection: bool = False
) -> list[dict[str, Any]]:
"""Return visible types from the schema."""
types = schema.get("types") or []
return [
item
for item in types
if item.get("name") and (include_introspection or not str(item["name"]).startswith("__"))
]
def _types_by_name(
    schema: dict[str, Any], *, include_introspection: bool = False
) -> dict[str, dict[str, Any]]:
    """Index the visible types by their (stringified) name."""
    by_name: dict[str, dict[str, Any]] = {}
    for entry in _visible_types(schema, include_introspection=include_introspection):
        by_name[str(entry["name"])] = entry
    return by_name
def _field_signature(field: dict[str, Any]) -> str:
    """Render a stable field signature (name, sorted args, return type)."""
    parts: list[str] = []
    for arg in sorted(field.get("args") or [], key=lambda item: str(item["name"])):
        piece = f"{arg['name']}: {_type_to_str(arg.get('type'))}"
        default = arg.get("defaultValue")
        if default is not None:
            piece = f"{piece} = {default}"
        parts.append(piece)
    args_section = f"({', '.join(parts)})" if parts else "()"
    return f"{field['name']}{args_section}: {_type_to_str(field.get('type'))}"
def _input_field_signature(field: dict[str, Any]) -> str:
    """Render a stable input-field signature, including any default value."""
    base = f"{field['name']}: {_type_to_str(field.get('type'))}"
    default = field.get("defaultValue")
    return base if default is None else f"{base} = {default}"
def _enum_value_signature(enum_value: dict[str, Any]) -> str:
    """Render a stable enum-value signature, annotating deprecation."""
    name = str(enum_value["name"])
    if not enum_value.get("isDeprecated"):
        return name
    reason = _clean(enum_value.get("deprecationReason"))
    return f"{name} [deprecated: {reason}]" if reason else f"{name} [deprecated]"
def _root_field_names(schema: dict[str, Any], root_key: str) -> set[str]:
    """Return field names of one root (``queryType``/``mutationType``/``subscriptionType``)."""
    root_name = (schema.get(root_key) or {}).get("name")
    if not root_name:
        return set()
    root = _types_by_name(schema).get(str(root_name))
    if not root:
        return set()
    return {str(field["name"]) for field in (root.get("fields") or [])}
def _type_member_signatures(type_info: dict[str, Any]) -> set[str]:
    """Return stable member signatures for a type, dispatched on its kind.

    Kinds without members (SCALAR and anything unrecognized) yield the
    empty set.
    """
    kind = str(type_info.get("kind", "UNKNOWN"))
    if kind in ("OBJECT", "INTERFACE"):
        return {_field_signature(member) for member in (type_info.get("fields") or [])}
    if kind == "INPUT_OBJECT":
        return {
            _input_field_signature(member) for member in (type_info.get("inputFields") or [])
        }
    if kind == "ENUM":
        return {_enum_value_signature(member) for member in (type_info.get("enumValues") or [])}
    if kind == "UNION":
        return {
            str(possible["name"])
            for possible in (type_info.get("possibleTypes") or [])
            if possible.get("name")
        }
    return set()
def _build_summary_markdown(
    schema: dict[str, Any], *, source: str, generated_at: str, include_introspection: bool
) -> str:
    """Build the condensed root-level summary markdown.

    Renders one table per root operation type (query/mutation/subscription)
    plus type-kind counts; the exhaustive reference is produced separately
    by ``_build_markdown``.
    """
    types = _types_by_name(schema, include_introspection=include_introspection)
    visible_types = _visible_types(schema, include_introspection=include_introspection)
    directives = sorted(schema.get("directives") or [], key=lambda item: str(item["name"]))
    kind_counts = Counter(str(item.get("kind", "UNKNOWN")) for item in visible_types)
    query_root = (schema.get("queryType") or {}).get("name")
    mutation_root = (schema.get("mutationType") or {}).get("name")
    subscription_root = (schema.get("subscriptionType") or {}).get("name")
    lines = [
        "# Unraid API Introspection Summary",
        "",
        f"> Auto-generated from live API introspection on {generated_at}",
        f"> Source: {source}",
        "",
        "## Table of Contents",
        "",
        "- [Schema Summary](#schema-summary)",
        "- [Query Fields](#query-fields)",
        "- [Mutation Fields](#mutation-fields)",
        "- [Subscription Fields](#subscription-fields)",
        "- [Type Kinds](#type-kinds)",
        "",
        "## Schema Summary",
        f"- Query root: `{query_root}`",
        f"- Mutation root: `{mutation_root}`",
        f"- Subscription root: `{subscription_root}`",
        f"- Total types: **{len(visible_types)}**",
        f"- Total directives: **{len(directives)}**",
        "",
    ]
    # Closure over `lines`/`types`: emits one markdown table per root type.
    # A missing root still gets its header row, just with no body rows.
    def render_table(section_title: str, root_name: str | None) -> None:
        lines.append(f"## {section_title}")
        lines.append("")
        lines.append("| Field | Return Type | Arguments |")
        lines.append("|-------|-------------|-----------|")
        root = types.get(str(root_name)) if root_name else None
        for field in (
            sorted(root.get("fields") or [], key=lambda item: str(item["name"])) if root else []
        ):
            args = sorted(field.get("args") or [], key=lambda item: str(item["name"]))
            arg_text = (
                ""
                if not args
                else ", ".join(
                    (
                        f"{arg['name']}: {_type_to_str(arg.get('type'))}"
                        + (
                            f" (default: {arg['defaultValue']})"
                            if arg.get("defaultValue") is not None
                            else ""
                        )
                    )
                    for arg in args
                )
            )
            lines.append(
                f"| `{field['name']}` | `{_type_to_str(field.get('type'))}` | {arg_text} |"
            )
        lines.append("")
    render_table("Query Fields", query_root)
    render_table("Mutation Fields", mutation_root)
    render_table("Subscription Fields", subscription_root)
    lines.append("## Type Kinds")
    lines.append("")
    for kind in sorted(kind_counts):
        lines.append(f"- `{kind}`: {kind_counts[kind]}")  # noqa: PERF401
    lines.extend(
        [
            "",
            "## Notes",
            "",
            "- This summary is intentionally condensed; the full schema reference lives in `UNRAID-API-COMPLETE-REFERENCE.md`.",
            "- Raw schema exports live in `UNRAID-API-INTROSPECTION.json` and `UNRAID-SCHEMA.graphql`.",
            "",
        ]
    )
    return "\n".join(lines)
def _build_changes_markdown(
    previous_schema: dict[str, Any] | None,
    current_schema: dict[str, Any],
    *,
    source: str,
    generated_at: str,
    include_introspection: bool,
) -> str:
    """Build a schema change report from a previous introspection snapshot.

    Reports, in order: added/removed root fields, added/removed types per
    kind, and member-level signature changes on types present in both
    snapshots. With no ``previous_schema`` a short no-diff notice is emitted.
    """
    lines = [
        "# Unraid API Schema Changes",
        "",
        f"> Generated on {generated_at}",
        f"> Source: {source}",
        "",
    ]
    if previous_schema is None:
        lines.extend(
            [
                "No previous introspection snapshot was available, so no diff could be computed.",
                "",
                "The current canonical artifacts were regenerated successfully.",
                "",
            ]
        )
        return "\n".join(lines)
    current_types = _types_by_name(current_schema, include_introspection=include_introspection)
    previous_types = _types_by_name(previous_schema, include_introspection=include_introspection)
    # (label, old field-name set, new field-name set) per root operation type.
    sections = [
        (
            "Query fields",
            _root_field_names(previous_schema, "queryType"),
            _root_field_names(current_schema, "queryType"),
        ),
        (
            "Mutation fields",
            _root_field_names(previous_schema, "mutationType"),
            _root_field_names(current_schema, "mutationType"),
        ),
        (
            "Subscription fields",
            _root_field_names(previous_schema, "subscriptionType"),
            _root_field_names(current_schema, "subscriptionType"),
        ),
    ]
    all_kinds = {"OBJECT", "INPUT_OBJECT", "ENUM", "INTERFACE", "UNION", "SCALAR"}
    previous_by_kind = {
        kind: {name for name, info in previous_types.items() if str(info.get("kind")) == kind}
        for kind in all_kinds
    }
    current_by_kind = {
        kind: {name for name, info in current_types.items() if str(info.get("kind")) == kind}
        for kind in all_kinds
    }
    for label, old_set, new_set in sections:
        added = sorted(new_set - old_set)
        removed = sorted(old_set - new_set)
        lines.append(f"## {label}")
        lines.append("")
        lines.append(f"- Added: {len(added)}")
        if added:
            lines.extend(f" - `{name}`" for name in added)
        lines.append(f"- Removed: {len(removed)}")
        if removed:
            lines.extend(f" - `{name}`" for name in removed)
        if not added and not removed:
            lines.append("- No changes")
        lines.append("")
    lines.append("## Type Changes")
    lines.append("")
    # Kinds with no additions or removals are omitted entirely.
    for kind in sorted(all_kinds):
        added = sorted(current_by_kind[kind] - previous_by_kind[kind])
        removed = sorted(previous_by_kind[kind] - current_by_kind[kind])
        if not added and not removed:
            continue
        lines.append(f"### {kind}")
        lines.append("")
        lines.append(f"- Added: {len(added)}")
        if added:
            lines.extend(f" - `{name}`" for name in added)
        lines.append(f"- Removed: {len(removed)}")
        if removed:
            lines.extend(f" - `{name}`" for name in removed)
        lines.append("")
    # A type counts as changed when its kind changed or any member signature
    # differs between snapshots.
    changed_types: list[str] = []
    for name in sorted(set(previous_types) & set(current_types)):
        previous_info = previous_types[name]
        current_info = current_types[name]
        if str(previous_info.get("kind")) != str(current_info.get("kind")):
            changed_types.append(name)
            continue
        if _type_member_signatures(previous_info) != _type_member_signatures(current_info):
            changed_types.append(name)
    lines.append("## Type Signature Changes")
    lines.append("")
    if not changed_types:
        lines.append("No existing type signatures changed.")
        lines.append("")
        return "\n".join(lines)
    for name in changed_types:
        previous_info = previous_types[name]
        current_info = current_types[name]
        previous_members = _type_member_signatures(previous_info)
        current_members = _type_member_signatures(current_info)
        added = sorted(current_members - previous_members)
        removed = sorted(previous_members - current_members)
        lines.append(f"### `{name}` ({current_info.get('kind')})")
        lines.append("")
        lines.append(f"- Added members: {len(added)}")
        if added:
            lines.extend(f" - `{member}`" for member in added)
        lines.append(f"- Removed members: {len(removed)}")
        if removed:
            lines.extend(f" - `{member}`" for member in removed)
        if not added and not removed and previous_info.get("kind") != current_info.get("kind"):
            lines.append(
                f"- Kind changed: `{previous_info.get('kind')}` -> `{current_info.get('kind')}`"
            )
        lines.append("")
    return "\n".join(lines)
def _extract_schema(payload: dict[str, Any]) -> dict[str, Any]:
"""Return the __schema payload or raise."""
schema = (payload.get("data") or {}).get("__schema")
if not schema:
raise SystemExit("GraphQL introspection returned no __schema payload.")
return schema
def _load_previous_schema(path: Path) -> dict[str, Any] | None:
    """Load a prior introspection snapshot, or None when no file exists."""
    if not path.exists():
        return None
    raw = path.read_text(encoding="utf-8")
    return _extract_schema(json.loads(raw))
def _write_schema_graphql(path: Path, payload: dict[str, Any]) -> None:
    """Write the schema as SDL, prefixed with a do-not-edit banner."""
    sdl = print_schema(build_client_schema(payload["data"]))
    banner = (
        "# ------------------------------------------------------\n"
        "# THIS FILE WAS AUTOMATICALLY GENERATED (DO NOT MODIFY)\n"
        "# ------------------------------------------------------\n\n"
    )
    path.write_text(banner + sdl.rstrip() + "\n", encoding="utf-8")
def _parse_args(argv: list[str] | None = None) -> argparse.Namespace:
    """Parse CLI args.

    Args:
        argv: Optional explicit argument list. Defaults to ``None``, in
            which case :mod:`argparse` reads ``sys.argv`` as before; an
            explicit list makes the parser testable without patching
            ``sys.argv``.

    Returns:
        Parsed namespace with endpoint, credentials, output paths, and
        generation options.
    """
    parser = argparse.ArgumentParser(
        description="Generate canonical Unraid GraphQL docs from introspection."
    )
    parser.add_argument(
        "--api-url",
        default=os.getenv("UNRAID_API_URL", ""),
        help="GraphQL endpoint URL (default: UNRAID_API_URL env var).",
    )
    parser.add_argument(
        "--api-key",
        default=os.getenv("UNRAID_API_KEY", ""),
        help="API key (default: UNRAID_API_KEY env var).",
    )
    parser.add_argument(
        "--complete-output",
        type=Path,
        default=DEFAULT_COMPLETE_OUTPUT,
        help=f"Full reference output path (default: {DEFAULT_COMPLETE_OUTPUT}).",
    )
    parser.add_argument(
        "--summary-output",
        type=Path,
        default=DEFAULT_SUMMARY_OUTPUT,
        help=f"Summary output path (default: {DEFAULT_SUMMARY_OUTPUT}).",
    )
    parser.add_argument(
        "--introspection-output",
        type=Path,
        default=DEFAULT_INTROSPECTION_OUTPUT,
        help=f"Introspection JSON output path (default: {DEFAULT_INTROSPECTION_OUTPUT}).",
    )
    parser.add_argument(
        "--schema-output",
        type=Path,
        default=DEFAULT_SCHEMA_OUTPUT,
        help=f"SDL schema output path (default: {DEFAULT_SCHEMA_OUTPUT}).",
    )
    parser.add_argument(
        "--changes-output",
        type=Path,
        default=DEFAULT_CHANGES_OUTPUT,
        help=f"Schema changes report path (default: {DEFAULT_CHANGES_OUTPUT}).",
    )
    parser.add_argument(
        "--previous-introspection",
        type=Path,
        default=None,
        help=(
            "Previous introspection JSON used for diffing. Defaults to the current "
            "introspection output path, falling back to the legacy docs path if present."
        ),
    )
    parser.add_argument(
        "--timeout-seconds",
        type=float,
        default=90.0,
        help="HTTP timeout in seconds (default: 90).",
    )
    parser.add_argument(
        "--verify-ssl",
        action="store_true",
        help="Enable SSL cert verification. Default is disabled for local/self-signed setups.",
    )
    parser.add_argument(
        "--include-introspection-types",
        action="store_true",
        help="Include __Schema/__Type/etc in the generated type list.",
    )
    return parser.parse_args(argv)
def main() -> int:
    """Run generator CLI.

    Fetches the schema via GraphQL introspection, then writes the full
    markdown reference, the summary, the raw introspection JSON, the SDL
    schema, and a changes report diffed against the previous snapshot.
    Returns 0 on success; configuration or API errors raise SystemExit.
    """
    args = _parse_args()
    if not args.api_url:
        raise SystemExit("Missing API URL. Provide --api-url or set UNRAID_API_URL.")
    if not args.api_key:
        raise SystemExit("Missing API key. Provide --api-key or set UNRAID_API_KEY.")

    request_headers = {"x-api-key": args.api_key, "Content-Type": "application/json"}
    with httpx.Client(timeout=args.timeout_seconds, verify=args.verify_ssl) as http:
        reply = http.post(args.api_url, json={"query": INTROSPECTION_QUERY}, headers=request_headers)
        reply.raise_for_status()
        payload = reply.json()

    if payload.get("errors"):
        rendered_errors = json.dumps(payload["errors"], indent=2)
        raise SystemExit(f"GraphQL introspection returned errors:\n{rendered_errors}")

    schema = _extract_schema(payload)
    generated_at = dt.datetime.now(dt.UTC).replace(microsecond=0).isoformat()

    # Snapshot for diffing: explicit flag wins, then the current output
    # path if it already exists, then the legacy docs location.
    if args.previous_introspection:
        previous_path = args.previous_introspection
    elif args.introspection_output.exists():
        previous_path = args.introspection_output
    else:
        previous_path = LEGACY_INTROSPECTION_OUTPUT
    previous_schema = _load_previous_schema(previous_path)

    # Ensure every output directory exists (set dedupes shared parents).
    for output_path in {
        args.complete_output,
        args.summary_output,
        args.introspection_output,
        args.schema_output,
        args.changes_output,
    }:
        output_path.parent.mkdir(parents=True, exist_ok=True)

    include_meta = bool(args.include_introspection_types)
    full_reference = _build_markdown(
        schema,
        include_introspection=include_meta,
        source=args.api_url,
        generated_at=generated_at,
    )
    summary = _build_summary_markdown(
        schema,
        source=args.api_url,
        generated_at=generated_at,
        include_introspection=include_meta,
    )
    changes = _build_changes_markdown(
        previous_schema,
        schema,
        source=args.api_url,
        generated_at=generated_at,
        include_introspection=include_meta,
    )

    args.complete_output.write_text(full_reference, encoding="utf-8")
    args.summary_output.write_text(summary, encoding="utf-8")
    args.introspection_output.write_text(
        json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8"
    )
    _write_schema_graphql(args.schema_output, payload)
    args.changes_output.write_text(changes, encoding="utf-8")

    for written in (
        args.complete_output,
        args.summary_output,
        args.introspection_output,
        args.schema_output,
        args.changes_output,
    ):
        print(f"Wrote {written}")
    return 0
# Script entry point: exit with main()'s return code.
if __name__ == "__main__":
    raise SystemExit(main())

View file

@ -1,99 +0,0 @@
#!/usr/bin/env bash
# Validate Claude Code marketplace and plugin structure
# NOTE: -e is deliberately omitted so that individual failed checks do not
# abort the run; the script tallies failures and exits non-zero at the end.
set -uo pipefail
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
NC='\033[0m' # No Color
# Counters (updated by check() and the version-sync section below)
CHECKS=0
PASSED=0
FAILED=0
# Run one named validation command; prints a status line and updates the
# global CHECKS/PASSED/FAILED counters. Returns the command's pass/fail.
check() {
local label="$1"
local cmd="$2"
CHECKS=$((CHECKS + 1))
echo -n "Checking: $label... "
if eval "$cmd" > /dev/null 2>&1; then
echo -e "${GREEN}${NC}"
PASSED=$((PASSED + 1))
return 0
fi
echo -e "${RED}${NC}"
FAILED=$((FAILED + 1))
return 1
}
echo "=== Validating Claude Code Marketplace Structure ==="
echo ""
# Check marketplace manifest (presence, JSON validity, required fields)
check "Marketplace manifest exists" "test -f .claude-plugin/marketplace.json"
check "Marketplace manifest is valid JSON" "jq empty .claude-plugin/marketplace.json"
check "Marketplace has name" "jq -e '.name' .claude-plugin/marketplace.json"
check "Marketplace has plugins array" "jq -e '.plugins | type == \"array\"' .claude-plugin/marketplace.json"
# Check plugin manifest
check "Plugin manifest exists" "test -f .claude-plugin/plugin.json"
check "Plugin manifest is valid JSON" "jq empty .claude-plugin/plugin.json"
check "Plugin has name" "jq -e '.name' .claude-plugin/plugin.json"
check "Plugin has version" "jq -e '.version' .claude-plugin/plugin.json"
# Check plugin structure (expected skill directory layout)
check "Plugin has SKILL.md" "test -f skills/unraid/SKILL.md"
check "Plugin has README.md" "test -f skills/unraid/README.md"
check "Plugin has scripts directory" "test -d skills/unraid/scripts"
check "Plugin has examples directory" "test -d skills/unraid/examples"
check "Plugin has references directory" "test -d skills/unraid/references"
# Validate plugin is listed in marketplace
check "Plugin listed in marketplace" "jq -e '.plugins[] | select(.name == \"unraid\")' .claude-plugin/marketplace.json"
# Check marketplace metadata
check "Marketplace has repository" "jq -e '.repository' .claude-plugin/marketplace.json"
check "Marketplace has owner" "jq -e '.owner' .claude-plugin/marketplace.json"
# Verify source path. The `// empty` + `|| true` combination yields an empty
# string (rather than an error) when the plugin entry or field is missing.
PLUGIN_SOURCE=$(jq -r '.plugins[]? | select(.name == "unraid") | .source // empty' .claude-plugin/marketplace.json 2>/dev/null || true)
if [ -n "$PLUGIN_SOURCE" ]; then
check "Plugin source path is valid" "test -d \"$PLUGIN_SOURCE\""
else
# check() was never called, so count the failure manually.
CHECKS=$((CHECKS + 1))
FAILED=$((FAILED + 1))
echo -e "Checking: Plugin source path is valid... ${RED}${NC} (plugin not found in marketplace)"
fi
# Check version sync between pyproject.toml and plugin.json.
# plugin.json is read with jq to stay consistent with the rest of this
# script (the previous python3 one-liner added an otherwise-unneeded
# dependency). A missing file or field yields ERROR_READING, which can
# never match the TOML version and therefore reports a failure.
echo "Checking version sync..."
TOML_VER=$(grep -m1 '^version = ' pyproject.toml | sed 's/version = "//;s/"//')
PLUGIN_VER=$(jq -r '.version // "ERROR_READING"' .claude-plugin/plugin.json 2>/dev/null || echo "ERROR_READING")
if [ "$TOML_VER" != "$PLUGIN_VER" ]; then
echo -e "${RED}FAIL: Version mismatch — pyproject.toml=$TOML_VER, plugin.json=$PLUGIN_VER${NC}"
CHECKS=$((CHECKS + 1))
FAILED=$((FAILED + 1))
else
echo -e "${GREEN}PASS: Versions in sync ($TOML_VER)${NC}"
CHECKS=$((CHECKS + 1))
PASSED=$((PASSED + 1))
fi
echo ""
echo "=== Results ==="
echo -e "Total checks: $CHECKS"
echo -e "${GREEN}Passed: $PASSED${NC}"
# Exit non-zero so CI fails when any structural check failed.
if [ $FAILED -gt 0 ]; then
echo -e "${RED}Failed: $FAILED${NC}"
exit 1
else
echo -e "${GREEN}All checks passed!${NC}"
echo ""
echo "Marketplace is ready for distribution at:"
echo " $(jq -r '.repository' .claude-plugin/marketplace.json)"
fi

View file

@ -1572,7 +1572,7 @@ wheels = [
[[package]]
name = "unraid-mcp"
version = "1.2.5"
version = "1.25.1"
source = { editable = "." }
dependencies = [
{ name = "fastapi" },