Initial commit: VaultMesh Skills collection

Collection of operational skills for VaultMesh infrastructure including:
- backup-sovereign: Backup and recovery operations
- btc-anchor: Bitcoin anchoring
- cloudflare-tunnel-manager: Cloudflare tunnel management
- container-registry: Container registry operations
- disaster-recovery: Disaster recovery procedures
- dns-sovereign: DNS management
- eth-anchor: Ethereum anchoring
- gitea-bootstrap: Gitea setup and configuration
- hetzner-bootstrap: Hetzner server provisioning
- merkle-forest: Merkle tree operations
- node-hardening: Node security hardening
- operator-bootstrap: Operator initialization
- proof-verifier: Cryptographic proof verification
- rfc3161-anchor: RFC3161 timestamping
- secrets-vault: Secrets management

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
Vault Sovereign
2025-12-27 00:25:00 +00:00
commit eac77ef7b4
213 changed files with 11724 additions and 0 deletions

58
merkle-forest/SKILL.md Normal file
View File

@@ -0,0 +1,58 @@
---
name: merkle-forest
description: >
Build deterministic Merkle roots over files using BLAKE3, emit ROOT.txt and PROOF.json,
with plan/apply/rollback, verification, and audit report. Designed to feed rfc3161-anchor,
eth-anchor, and btc-anchor. Triggers: 'merkle root', 'build merkle tree', 'root.txt',
'proof receipt', 'hash artifacts'.
version: 1.0.0
---
# Merkle Forest (BLAKE3)
Computes:
- Leaf hashes: BLAKE3(file bytes)
- Merkle root: BLAKE3(left_hex || right_hex), combined level by level (the last node is duplicated when a level has an odd count)
- ROOT.txt + PROOF.json receipts
## Quick Start
```bash
cd ~/.claude/skills/merkle-forest
export INPUT_DIR="$HOME/infrastructure" # or INPUT_FILES="a,b,c"
export LABEL="infra-snapshot"
export NODE_NAME="node-a"
./scripts/00_preflight.sh
./scripts/10_plan.sh
export DRY_RUN=0
./scripts/11_apply.sh
./scripts/90_verify.sh
./scripts/99_report.sh
```
## Inputs
| Parameter | Required | Default | Description |
|---|---:|---|---|
| INPUT_DIR | No | (empty) | Directory to hash recursively |
| INPUT_FILES | No | (empty) | Comma-separated file paths |
| EXCLUDES | No | .git,node_modules,target,dist,outputs | Exclude patterns |
| LABEL | No | snapshot | Run label |
| NODE_NAME | No | node-a | Node id |
| OUTPUT_DIR | No | outputs | Outputs base |
| DRY_RUN | No | 1 | Apply refuses unless DRY_RUN=0 |
| REQUIRE_CONFIRM | No | 1 | Require confirmation phrase |
| CONFIRM_PHRASE | No | I UNDERSTAND THIS WILL HASH FILES AND EMIT A ROOT | Safety phrase |
## Outputs
`outputs/runs/<node>_<label>_<timestamp>/`
- files.txt, leaf_hashes.txt, levels/, ROOT.txt, PROOF.json, status_matrix.json, audit_report.md
## EU Compliance
EU (Ireland - Dublin), Irish jurisdiction. Local-first proof artifacts.

40
merkle-forest/config.json Normal file
View File

@@ -0,0 +1,40 @@
{
"name": "merkle-forest",
"version": "1.0.0",
"defaults": {
"LABEL": "snapshot",
"NODE_NAME": "node-a",
"EXCLUDES": ".git,node_modules,target,dist,outputs",
"OUTPUT_DIR": "outputs",
"DRY_RUN": "1",
"REQUIRE_CONFIRM": "1",
"CONFIRM_PHRASE": "I UNDERSTAND THIS WILL HASH FILES AND EMIT A ROOT"
},
"phases": {
"preflight": [
"00_preflight.sh"
],
"merkle": {
"plan": [
"10_plan.sh"
],
"apply": [
"11_apply.sh"
],
"rollback": [
"rollback/undo_last_run.sh"
]
},
"verify": [
"90_verify.sh"
],
"report": [
"99_report.sh"
]
},
"eu_compliance": {
"data_residency": "EU",
"jurisdiction": "Ireland",
"gdpr_applicable": true
}
}

View File

@@ -0,0 +1,7 @@
# Merkle Root Spec (v1)
- Leaf = BLAKE3(file bytes)
- Parent = BLAKE3(left_hex || right_hex) using ASCII hex concatenation
- Sort paths for deterministic ordering
- Duplicate last node on odd counts
- Emit ROOT.txt + PROOF.json

View File

@@ -0,0 +1,19 @@
#!/usr/bin/env bash
# 00_preflight.sh — read-only sanity checks before a merkle-forest run:
# required tools, a usable BLAKE3 hasher, and valid input configuration.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
SKILL_ROOT="$(dirname "$SCRIPT_DIR")"
source "$SCRIPT_DIR/_common.sh"
: "${INPUT_DIR:=}"
: "${INPUT_FILES:=}"
: "${OUTPUT_DIR:=$SKILL_ROOT/outputs}"

main() {
  local tool
  for tool in find sort awk wc tr; do
    need "$tool"
  done
  # pick_hasher dies unless b3sum or blake3 is installed.
  pick_hasher >/dev/null
  [[ -n "$INPUT_DIR" || -n "$INPUT_FILES" ]] || die "Set INPUT_DIR or INPUT_FILES."
  [[ -z "$INPUT_DIR" || -d "$INPUT_DIR" ]] || die "INPUT_DIR not a directory: $INPUT_DIR"
  mkdir -p "$OUTPUT_DIR/runs"
  log_info "Preflight OK."
}
main "$@"

View File

@@ -0,0 +1,48 @@
#!/usr/bin/env bash
# 10_plan.sh — dry-run preview: resolves the file list that 11_apply.sh
# would hash and prints a plan summary. Makes no changes to outputs.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
SKILL_ROOT="$(dirname "$SCRIPT_DIR")"
source "$SCRIPT_DIR/_common.sh"
: "${INPUT_DIR:=}"
: "${INPUT_FILES:=}"
: "${EXCLUDES:=.git,node_modules,target,dist,outputs}"
: "${LABEL:=snapshot}"
: "${NODE_NAME:=node-a}"
: "${OUTPUT_DIR:=$SKILL_ROOT/outputs}"

# Write the sorted, de-duplicated list of files to hash into $1.
# INPUT_FILES (comma-separated) takes precedence over INPUT_DIR; EXCLUDES
# entries are matched as path components ("*/<pat>/*") during the walk.
build_file_list() {
  local out="$1"
  local find_args=()
  local ex pat
  IFS=',' read -r -a ex <<< "$EXCLUDES"
  for pat in "${ex[@]}"; do
    [[ -n "$pat" ]] && find_args+=( -not -path "*/$pat/*" )
  done
  if [[ -n "$INPUT_FILES" ]]; then
    : > "$out"
    local files f
    IFS=',' read -r -a files <<< "$INPUT_FILES"
    for f in "${files[@]}"; do
      # Trim surrounding whitespace so "a, b" parses as expected.
      f="${f#"${f%%[![:space:]]*}"}"; f="${f%"${f##*[![:space:]]}"}"
      [[ -f "$f" ]] || die "Not a file: $f"
      # printf, not echo: filenames like "-n" must pass through verbatim.
      printf '%s\n' "$f" >> "$out"
    done
  else
    # mktemp instead of the predictable "$out.rel" sibling name.
    local rel; rel="$(mktemp)"
    # ${find_args[@]+...} avoids an unbound-variable error under set -u
    # on bash < 4.4 when EXCLUDES is set to the empty string.
    (cd "$INPUT_DIR" && find . -type f ${find_args[@]+"${find_args[@]}"} | sed 's|^\./||') > "$rel"
    awk -v root="$INPUT_DIR" '{print root "/" $0}' "$rel" > "$out"
    rm -f "$rel"
  fi
  sort -u "$out" -o "$out"
}

main() {
  local tmp; tmp="$(mktemp)"
  build_file_list "$tmp"
  local count; count="$(wc -l < "$tmp" | tr -d ' ')"
  echo "[PLAN] $(date -Iseconds) Merkle Forest"
  echo "[PLAN] Node: $NODE_NAME Label: $LABEL"
  echo "[PLAN] Hasher: $(pick_hasher)"
  echo "[PLAN] Files: $count"
  echo "[PLAN] Output: $OUTPUT_DIR"
  echo "[PLAN] Next: export DRY_RUN=0 && ./scripts/11_apply.sh"
  rm -f "$tmp"
}
main "$@"

View File

@@ -0,0 +1,107 @@
#!/usr/bin/env bash
# 11_apply.sh — the mutating phase: hashes the selected files with BLAKE3,
# folds the leaf hashes pairwise into a Merkle root, and emits
# ROOT.txt + PROOF.json in a fresh timestamped run directory.
# Gated by confirm_gate (DRY_RUN=0 plus, by default, a typed phrase).
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
SKILL_ROOT="$(dirname "$SCRIPT_DIR")"
source "$SCRIPT_DIR/_common.sh"
# Inputs — see the SKILL.md "Inputs" table for semantics.
: "${INPUT_DIR:=}"
: "${INPUT_FILES:=}"
: "${EXCLUDES:=.git,node_modules,target,dist,outputs}"
: "${LABEL:=snapshot}"
: "${NODE_NAME:=node-a}"
: "${OUTPUT_DIR:=$SKILL_ROOT/outputs}"
# Write the sorted, de-duplicated file list into $1. INPUT_FILES
# (comma-separated) wins over INPUT_DIR; EXCLUDES entries are matched as
# path components ("*/<pat>/*") during the find walk.
# NOTE(review): duplicated verbatim in 10_plan.sh so each phase script is
# self-contained.
build_file_list() {
local out="$1"
local find_args=()
IFS=',' read -r -a ex <<< "$EXCLUDES"
for pat in "${ex[@]}"; do [[ -n "$pat" ]] && find_args+=( -not -path "*/$pat/*" ); done
if [[ -n "$INPUT_FILES" ]]; then
: > "$out"
IFS=',' read -r -a files <<< "$INPUT_FILES"
for f in "${files[@]}"; do
# Trim leading/trailing whitespace from each comma-separated entry.
f="${f#"${f%%[![:space:]]*}"}"; f="${f%"${f##*[![:space:]]}"}"
[[ -f "$f" ]] || die "Not a file: $f"
echo "$f" >> "$out"
done
else
# Collect paths relative to INPUT_DIR, then prefix them back to absolute.
(cd "$INPUT_DIR" && find . -type f "${find_args[@]}" | sed 's|^\./||') > "$out.rel"
awk -v root="$INPUT_DIR" '{print root "/" $0}' "$out.rel" > "$out"
rm -f "$out.rel"
fi
sort -u "$out" -o "$out"
}
main() {
confirm_gate
local hasher; hasher="$(pick_hasher)"
mkdir -p "$OUTPUT_DIR/runs"
# ':' is replaced so the ISO timestamp is safe in a directory name.
local ts; ts="$(date -Iseconds | tr ':' '-')"
local run_dir="$OUTPUT_DIR/runs/${NODE_NAME}_${LABEL}_${ts}"
mkdir -p "$run_dir/levels"
local files="$run_dir/files.txt"
build_file_list "$files"
local n; n="$(wc -l < "$files" | tr -d ' ')"
[[ "$n" -gt 0 ]] || die "No files to hash."
# Leaf hashes file: one "<hex> <path>" line per input file.
local leaf="$run_dir/leaf_hashes.txt"
: > "$leaf"
while IFS= read -r f; do
h="$(hash_file "$hasher" "$f")"
printf "%s %s\n" "$h" "$f" >> "$leaf"
done < "$files"
# NOTE(review): this re-sort orders lines by hash (the hash is the first
# field), so level 0 is hash-ordered rather than path-ordered; the spec
# in references/ says "sort paths". Deterministic either way, but confirm
# this ordering is the intended one before anchoring roots externally.
sort "$leaf" -o "$leaf"
awk '{print $1}' "$leaf" > "$run_dir/levels/level_0.txt"
# Fold levels pairwise until one node remains; each level is persisted
# under levels/ for auditability.
local level=0 cur="$run_dir/levels/level_0.txt"
while true; do
local count; count="$(wc -l < "$cur" | tr -d ' ')"
[[ "$count" -le 1 ]] && break
local next="$run_dir/levels/level_$((level+1)).txt"
: > "$next"
mapfile -t arr < "$cur"
i=0
while [[ $i -lt ${#arr[@]} ]]; do
left="${arr[$i]}"
# Odd node count: duplicate the last node as its own sibling (per spec).
if [[ $((i+1)) -lt ${#arr[@]} ]]; then right="${arr[$((i+1))]}"; else right="$left"; fi
# Parent = BLAKE3 over the ASCII concatenation of the two child hex digests.
hash_pair_hex "$hasher" "$left" "$right" >> "$next"
# hash_pair_hex prints no trailing newline (printf "%s"); echo supplies it.
echo "" >> "$next"
i=$((i+2))
done
# Defensively drop any blank lines before promoting this level.
grep -v '^$' "$next" > "$next.tmp" && mv "$next.tmp" "$next"
level=$((level+1)); cur="$next"
done
root_hex="$(head -n 1 "$cur")"
[[ -n "$root_hex" ]] || die "Failed to compute root."
# Human-readable receipt.
cat > "$run_dir/ROOT.txt" <<EOF
root_hex=$root_hex
hasher=BLAKE3
leaves=$n
label=$LABEL
node=$NODE_NAME
timestamp=$(date -Iseconds)
EOF
# Machine-readable receipt, consumed by the anchor skills.
cat > "$run_dir/PROOF.json" <<EOF
{
"skill": "merkle-forest",
"version": "1.0.0",
"timestamp": "$(date -Iseconds)",
"node": "$NODE_NAME",
"label": "$LABEL",
"hasher": "BLAKE3",
"leaf_count": $n,
"root_hex": "$root_hex",
"artifacts": {"root":"ROOT.txt","proof":"PROOF.json","leaf_hashes":"leaf_hashes.txt"}
}
EOF
# Pointer consumed by 90_verify.sh, 99_report.sh, and the rollback script.
echo "$run_dir" > "$OUTPUT_DIR/last_run_dir.txt"
log_info "Run created: $run_dir"
log_info "ROOT: $root_hex"
}
main "$@"

View File

@@ -0,0 +1,41 @@
#!/usr/bin/env bash
# 90_verify.sh — verify artifacts of the most recent apply run and write
# status_matrix.json (checks, blockers, suggested next anchoring skills).
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
SKILL_ROOT="$(dirname "$SCRIPT_DIR")"
source "$SCRIPT_DIR/_common.sh"
: "${OUTPUT_DIR:=$SKILL_ROOT/outputs}"

main() {
  [[ -f "$OUTPUT_DIR/last_run_dir.txt" ]] || die "No last_run_dir.txt. Run 11_apply.sh first."
  local run_dir status root_hex
  run_dir="$(cat "$OUTPUT_DIR/last_run_dir.txt")"
  status="$run_dir/status_matrix.json"
  local ok_root=false ok_proof=false ok_leaf=false
  [[ -f "$run_dir/ROOT.txt" ]] && ok_root=true
  [[ -f "$run_dir/PROOF.json" ]] && ok_proof=true
  [[ -f "$run_dir/leaf_hashes.txt" ]] && ok_leaf=true
  root_hex="$(grep '^root_hex=' "$run_dir/ROOT.txt" 2>/dev/null | cut -d= -f2 || true)"
  # Accumulate ALL missing-artifact blockers. The previous if/elif chain
  # reported at most one blocker and never flagged a missing
  # leaf_hashes.txt at all.
  local items=""
  [[ "$ok_leaf" == "true" ]] || items="${items:+$items,}\"missing_leaf_hashes\""
  [[ "$ok_root" == "true" ]] || items="${items:+$items,}\"missing_root_txt\""
  [[ "$ok_proof" == "true" ]] || items="${items:+$items,}\"missing_proof_json\""
  local blockers="[$items]"
  cat > "$status" <<EOF
{
"skill": "merkle-forest",
"timestamp": "$(date -Iseconds)",
"run_dir": "$(json_escape "$run_dir")",
"root_hex": "$(json_escape "$root_hex")",
"checks": [
{"name":"leaf_hashes_present", "ok": $ok_leaf},
{"name":"root_present", "ok": $ok_root},
{"name":"proof_present", "ok": $ok_proof}
],
"blockers": $blockers,
"warnings": [],
"next_steps": ["rfc3161-anchor","eth-anchor","btc-anchor"]
}
EOF
  log_info "Wrote $status"
  cat "$status"
}
main "$@"

View File

@@ -0,0 +1,34 @@
#!/usr/bin/env bash
# 99_report.sh — render audit_report.md for the most recent run, embedding
# the status matrix produced by 90_verify.sh when present.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
SKILL_ROOT="$(dirname "$SCRIPT_DIR")"
source "$SCRIPT_DIR/_common.sh"
: "${OUTPUT_DIR:=$SKILL_ROOT/outputs}"

main() {
  [[ -f "$OUTPUT_DIR/last_run_dir.txt" ]] || die "No last_run_dir.txt. Run 11_apply.sh first."
  local run_dir report status root_hex
  run_dir="$(cat "$OUTPUT_DIR/last_run_dir.txt")"
  report="$run_dir/audit_report.md"
  status="$run_dir/status_matrix.json"
  # Explicit check: previously a missing ROOT.txt made the grep below fail
  # and set -e killed the script with no useful message.
  [[ -f "$run_dir/ROOT.txt" ]] || die "ROOT.txt missing in $run_dir. Run 11_apply.sh first."
  root_hex="$(grep '^root_hex=' "$run_dir/ROOT.txt" | cut -d= -f2)"
  cat > "$report" <<EOF
# Merkle Forest Audit Report
**Generated:** $(date -Iseconds)
**Run Dir:** \`$run_dir\`
**Root Hex:** \`$root_hex\`
**Skill Version:** 1.0.0
## Status Matrix
$(if [[ -f "$status" ]]; then echo '```json'; cat "$status"; echo '```'; else echo "_Missing status_matrix.json_"; fi)
## EU Compliance
EU (Ireland - Dublin), Irish jurisdiction. Local-first proof artifacts.
EOF
  log_info "Wrote $report"
  cat "$report"
}
main "$@"

View File

@@ -0,0 +1,52 @@
#!/usr/bin/env bash
# _common.sh — helpers shared by every merkle-forest script: logging, the
# DRY_RUN/confirmation safety gate, BLAKE3 hasher selection and invocation,
# and JSON string escaping.
set -euo pipefail

log_info(){ echo "[INFO] $(date -Iseconds) $*"; }
log_warn(){ echo "[WARN] $(date -Iseconds) $*" >&2; }
log_error(){ echo "[ERROR] $(date -Iseconds) $*" >&2; }

# Log an error and terminate the calling script.
die(){ log_error "$*"; exit 1; }

# Assert that a required external tool is on PATH.
need(){ command -v "$1" >/dev/null 2>&1 || die "Missing required tool: $1"; }

# Safety gate for mutating phases: refuses unless DRY_RUN=0, and (unless
# REQUIRE_CONFIRM is not "1") requires CONFIRM_PHRASE to be typed on stdin.
confirm_gate() {
  : "${DRY_RUN:=1}"
  : "${REQUIRE_CONFIRM:=1}"
  : "${CONFIRM_PHRASE:=I UNDERSTAND THIS WILL HASH FILES AND EMIT A ROOT}"
  [[ "$DRY_RUN" == "0" ]] || die "DRY_RUN=$DRY_RUN (set DRY_RUN=0)."
  if [[ "$REQUIRE_CONFIRM" == "1" ]]; then
    echo "Type to confirm:"
    echo " $CONFIRM_PHRASE"
    local input
    read -r input
    [[ "$input" == "$CONFIRM_PHRASE" ]] || die "Confirmation phrase mismatch."
  fi
}

# Print which BLAKE3 CLI is available ("b3sum" preferred, else "blake3");
# die if neither is installed.
pick_hasher() {
  if command -v b3sum >/dev/null 2>&1; then echo "b3sum"
  elif command -v blake3 >/dev/null 2>&1; then echo "blake3"
  else die "Need b3sum or blake3 installed."
  fi
}

# Print the lowercase hex BLAKE3 digest of a file's bytes.
hash_file() {
  local hasher="$1" file="$2"
  if [[ "$hasher" == "b3sum" ]]; then b3sum "$file" | awk '{print $1}'
  else blake3 "$file" | awk '{print $1}'
  fi
}

# Parent-node hash: BLAKE3 over the ASCII concatenation of the two child
# hex digests. Prints the hex digest with NO trailing newline.
hash_pair_hex() {
  local hasher="$1" left="$2" right="$3"
  local tmp; tmp="$(mktemp)"
  printf "%s%s" "$left" "$right" > "$tmp"
  local h
  if [[ "$hasher" == "b3sum" ]]; then h="$(b3sum "$tmp" | awk '{print $1}')"
  else h="$(blake3 "$tmp" | awk '{print $1}')"
  fi
  rm -f "$tmp"
  printf "%s" "$h"
}

# Escape a string for embedding inside a JSON string literal.
# Fix: the original escaped only backslash, quote, and newline; raw
# carriage-return and tab characters passed through unescaped, producing
# invalid JSON per RFC 8259 (control characters must be escaped).
json_escape() {
  local s="$1"
  s="${s//\\/\\\\}"
  s="${s//\"/\\\"}"
  s="${s//$'\n'/\\n}"
  s="${s//$'\r'/\\r}"
  s="${s//$'\t'/\\t}"
  printf "%s" "$s"
}

View File

@@ -0,0 +1,17 @@
#!/usr/bin/env bash
# undo_last_run.sh — rollback: delete the most recent run directory
# recorded in outputs/last_run_dir.txt. Gated by confirm_gate
# (DRY_RUN=0 plus, by default, a typed confirmation phrase).
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
SKILL_ROOT="$(dirname "$(dirname "$SCRIPT_DIR")")"
source "$SKILL_ROOT/scripts/_common.sh"
: "${OUTPUT_DIR:=$SKILL_ROOT/outputs}"

main() {
  confirm_gate
  if [[ ! -f "$OUTPUT_DIR/last_run_dir.txt" ]]; then log_warn "No last run."; exit 0; fi
  local run_dir
  run_dir="$(cat "$OUTPUT_DIR/last_run_dir.txt")"
  # Hardening: the original ran `rm -rf "$run_dir"` on whatever path the
  # plain-text pointer file contained. Refuse anything outside the managed
  # runs directory, and guard the expansion so an empty value can never
  # expand to a dangerous target.
  case "$run_dir" in
    "$OUTPUT_DIR"/runs/?*) ;;
    *) die "Refusing rollback: '$run_dir' is not under $OUTPUT_DIR/runs" ;;
  esac
  [[ -d "$run_dir" ]] && rm -rf -- "${run_dir:?}"
  rm -f -- "$OUTPUT_DIR/last_run_dir.txt" || true
  log_info "Rollback complete."
}
main "$@"