diff --git a/.gitignore b/.gitignore index bcf5182..2099ddb 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,7 @@ node_modules/ target/ venv/ __pycache__/ +30-evidence/ +50-reports/ +60-proofs/ +70-violations/ diff --git a/20-collectors/collect_backup_restore_drill.sh b/20-collectors/collect_backup_restore_drill.sh new file mode 100755 index 0000000..533d1ba --- /dev/null +++ b/20-collectors/collect_backup_restore_drill.sh @@ -0,0 +1,41 @@ +#!/usr/bin/env bash +set -euo pipefail + +OUT_DIR="${1:?usage: collect_backup_restore_drill.sh <out_dir>}" +mkdir -p "$OUT_DIR" + +ROOT="../vm-skills" +LATEST="$(find "$ROOT" -type f -name "*restore*drill*.json" 2>/dev/null | sort | tail -n 1 || true)" + +file_mtime_epoch() { + local file="$1" + if stat -c %Y "$file" >/dev/null 2>&1; then + stat -c %Y "$file" + else + stat -f %m "$file" + fi +} + +file_mtime_iso() { + local file="$1" + local mtime + mtime="$(file_mtime_epoch "$file")" + if date -u -r "$file" "+%Y-%m-%dT%H:%M:%SZ" >/dev/null 2>&1; then + date -u -r "$file" "+%Y-%m-%dT%H:%M:%SZ" + elif date -u -d "@${mtime}" "+%Y-%m-%dT%H:%M:%SZ" >/dev/null 2>&1; then + date -u -d "@${mtime}" "+%Y-%m-%dT%H:%M:%SZ" + else + date -u "+%Y-%m-%dT%H:%M:%SZ" + fi +} + +if [[ -n "$LATEST" && -f "$LATEST" ]]; then + TS="$(file_mtime_iso "$LATEST")" + cat > "$OUT_DIR/backup_restore_drill.json" <<JSON +{"collected": true, "path": "$LATEST", "mtime": "$TS"} +JSON +else + cat > "$OUT_DIR/backup_restore_drill.json" <<'JSON' +{"collected": false, "reason": "no restore drill artifacts found"} +JSON +fi diff --git a/20-collectors/collect_constitution_hash.sh b/20-collectors/collect_constitution_hash.sh new file mode 100755 index 0000000..21ee39d --- /dev/null +++ b/20-collectors/collect_constitution_hash.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash +set -euo pipefail + +OUT_DIR="${1:?usage: collect_constitution_hash.sh <out_dir>}" +mkdir -p "$OUT_DIR" + +LOCK_PATH="../vm-mcp/governance/constitution.lock" + +hash_file() { + local file="$1" + if command -v sha256sum >/dev/null 2>&1; then + sha256sum "$file" | awk '{print $1}' + else + shasum -a 
256 "$file" | awk '{print $1}' + fi +} + +if [[ -f "$LOCK_PATH" ]]; then + HASH="$(hash_file "$LOCK_PATH")" + cat > "$OUT_DIR/constitution_hash.json" <<JSON +{"collected": true, "sha256": "$HASH"} +JSON +else + cat > "$OUT_DIR/constitution_hash.json" <<'JSON' +{"collected": false, "reason": "constitution.lock not found at expected path"} +JSON +fi diff --git a/20-collectors/collect_ledger_verify.sh b/20-collectors/collect_ledger_verify.sh new file mode 100755 index 0000000..51947bf --- /dev/null +++ b/20-collectors/collect_ledger_verify.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env bash +set -euo pipefail + +OUT_DIR="${1:?usage: collect_ledger_verify.sh <out_dir>}" +mkdir -p "$OUT_DIR" + +if command -v ledger >/dev/null 2>&1; then + ledger verify --format json > "$OUT_DIR/ledger_verify.json" +elif command -v ledger-cli >/dev/null 2>&1; then + ledger-cli verify --format json > "$OUT_DIR/ledger_verify.json" +else + cat > "$OUT_DIR/ledger_verify.json" <<'JSON' +{"collected": false, "reason": "ledger CLI not found"} +JSON +fi diff --git a/40-rules/backup_restore_drill_recent.sh b/40-rules/backup_restore_drill_recent.sh new file mode 100755 index 0000000..5e53b03 --- /dev/null +++ b/40-rules/backup_restore_drill_recent.sh @@ -0,0 +1,91 @@ +#!/usr/bin/env bash +set -euo pipefail +source "$(dirname "$0")/../scripts/lib/common.sh" +require_cmd jq + +EVID_DIR="${1:?usage: backup_restore_drill_recent.sh <evidence_dir>}" +TS="$(iso_utc_now)" +FILE="$EVID_DIR/backup_restore_drill.json" + +MAX_DAYS="${VMCC_MAX_RESTORE_DRILL_AGE_DAYS:-7}" + +file_mtime_epoch() { + local file="$1" + if stat -c %Y "$file" >/dev/null 2>&1; then + stat -c %Y "$file" + else + stat -f %m "$file" + fi +} + +if [[ ! 
-f "$FILE" ]]; then + json_emit "$(jq -n --arg ts "$TS" '{ + version:"1.0.0", + rule_id:"backup.restore_drill_recent", + control_ids:["BC-01"], + passed:false, + severity:"MEDIUM", + timestamp:$ts, + evidence:[{path:"backup_restore_drill.json"}], + details:{error:"missing evidence file"} + }')" + exit 0 +fi + +COLLECTED="$(jq -r '.collected // false' "$FILE")" +if [[ "$COLLECTED" != "true" ]]; then + json_emit "$(jq -n --arg ts "$TS" '{ + version:"1.0.0", + rule_id:"backup.restore_drill_recent", + control_ids:["BC-01"], + passed:false, + severity:"MEDIUM", + timestamp:$ts, + evidence:[{path:"backup_restore_drill.json"}], + details:{error:"no restore drill evidence found"} + }')" + exit 0 +fi + +PATH_FOUND="$(jq -r '.path // empty' "$FILE")" +if [[ -z "$PATH_FOUND" || ! -f "$PATH_FOUND" ]]; then + json_emit "$(jq -n --arg ts "$TS" --arg p "$PATH_FOUND" '{ + version:"1.0.0", + rule_id:"backup.restore_drill_recent", + control_ids:["BC-01"], + passed:false, + severity:"MEDIUM", + timestamp:$ts, + evidence:[{path:"backup_restore_drill.json"}], + details:{error:"referenced drill file missing", referenced:$p} + }')" + exit 0 +fi + +NOW_EPOCH="$(date -u +%s)" +MTIME_EPOCH="$(file_mtime_epoch "$PATH_FOUND")" +AGE_DAYS="$(( (NOW_EPOCH - MTIME_EPOCH) / 86400 ))" + +if [[ "$AGE_DAYS" -le "$MAX_DAYS" ]]; then + json_emit "$(jq -n --arg ts "$TS" --argjson age "$AGE_DAYS" '{ + version:"1.0.0", + rule_id:"backup.restore_drill_recent", + control_ids:["BC-01"], + passed:true, + severity:"MEDIUM", + timestamp:$ts, + evidence:[{path:"backup_restore_drill.json"}], + details:{age_days:$age} + }')" +else + json_emit "$(jq -n --arg ts "$TS" --argjson age "$AGE_DAYS" --argjson max "$MAX_DAYS" '{ + version:"1.0.0", + rule_id:"backup.restore_drill_recent", + control_ids:["BC-01"], + passed:false, + severity:"MEDIUM", + timestamp:$ts, + evidence:[{path:"backup_restore_drill.json"}], + details:{error:"restore drill too old", age_days:$age, max_days:$max} + }')" +fi diff --git 
a/40-rules/governance_constitution_pinned.sh b/40-rules/governance_constitution_pinned.sh new file mode 100755 index 0000000..6cee97d --- /dev/null +++ b/40-rules/governance_constitution_pinned.sh @@ -0,0 +1,78 @@ +#!/usr/bin/env bash +set -euo pipefail +source "$(dirname "$0")/../scripts/lib/common.sh" +require_cmd jq + +EVID_DIR="${1:?usage: governance_constitution_pinned.sh <evidence_dir>}" +TS="$(iso_utc_now)" +FILE="$EVID_DIR/constitution_hash.json" + +PINNED_SHA256="${VMCC_PINNED_CONSTITUTION_SHA256:-}" + +if [[ ! -f "$FILE" ]]; then + json_emit "$(jq -n --arg ts "$TS" '{ + version:"1.0.0", + rule_id:"governance.constitution_pinned", + control_ids:["GV-01"], + passed:false, + severity:"HIGH", + timestamp:$ts, + evidence:[{path:"constitution_hash.json"}], + details:{error:"missing evidence file"} + }')" + exit 0 +fi + +COLLECTED="$(jq -r '.collected // false' "$FILE")" +if [[ "$COLLECTED" != "true" ]]; then + json_emit "$(jq -n --arg ts "$TS" '{ + version:"1.0.0", + rule_id:"governance.constitution_pinned", + control_ids:["GV-01"], + passed:false, + severity:"HIGH", + timestamp:$ts, + evidence:[{path:"constitution_hash.json"}], + details:{error:"constitution hash not collected"} + }')" + exit 0 +fi + +OBSERVED="$(jq -r '.sha256 // empty' "$FILE")" +if [[ -z "$PINNED_SHA256" ]]; then + json_emit "$(jq -n --arg ts "$TS" --arg observed "$OBSERVED" '{ + version:"1.0.0", + rule_id:"governance.constitution_pinned", + control_ids:["GV-01"], + passed:false, + severity:"HIGH", + timestamp:$ts, + evidence:[{path:"constitution_hash.json"}], + details:{error:"no pinned hash configured", observed_sha256:$observed} + }')" + exit 0 +fi + +if [[ "$OBSERVED" == "$PINNED_SHA256" ]]; then + json_emit "$(jq -n --arg ts "$TS" '{ + version:"1.0.0", + rule_id:"governance.constitution_pinned", + control_ids:["GV-01"], + passed:true, + severity:"HIGH", + timestamp:$ts, + evidence:[{path:"constitution_hash.json"}], + details:{} + }')" +else + json_emit "$(jq -n --arg ts "$TS" --arg observed 
"$OBSERVED" --arg pinned "$PINNED_SHA256" '{ + version:"1.0.0", + rule_id:"governance.constitution_pinned", + control_ids:["GV-01"], + passed:false, + severity:"HIGH", + timestamp:$ts, + evidence:[{path:"constitution_hash.json"}], + details:{error:"hash mismatch", observed_sha256:$observed, pinned_sha256:$pinned} + }')" +fi diff --git a/40-rules/ledger_hash_chain_intact.sh b/40-rules/ledger_hash_chain_intact.sh new file mode 100755 index 0000000..24490f1 --- /dev/null +++ b/40-rules/ledger_hash_chain_intact.sh @@ -0,0 +1,65 @@ +#!/usr/bin/env bash +set -euo pipefail +source "$(dirname "$0")/../scripts/lib/common.sh" +require_cmd jq + +EVID_DIR="${1:?usage: ledger_hash_chain_intact.sh <evidence_dir>}" +TS="$(iso_utc_now)" + +FILE="$EVID_DIR/ledger_verify.json" +if [[ ! -f "$FILE" ]]; then + json_emit "$(jq -n --arg ts "$TS" '{ + version:"1.0.0", + rule_id:"ledger.hash_chain_intact", + control_ids:["AU-01","AU-02"], + passed:false, + severity:"CRITICAL", + timestamp:$ts, + evidence:[{path:"ledger_verify.json"}], + details:{error:"missing evidence file"} + }')" + exit 0 +fi + +COLLECTED="$(jq -r '.collected // true' "$FILE")" +if [[ "$COLLECTED" != "true" ]]; then + json_emit "$(jq -n --arg ts "$TS" '{ + version:"1.0.0", + rule_id:"ledger.hash_chain_intact", + control_ids:["AU-01","AU-02"], + passed:false, + severity:"CRITICAL", + timestamp:$ts, + evidence:[{path:"ledger_verify.json"}], + details:{error:"ledger verify not collected"} + }')" + exit 0 +fi + +OK="$(jq -r '.ok // false' "$FILE")" +ENTRY_COUNT="$(jq -r '.entry_count // 0' "$FILE")" +FAILURES_JSON="$(jq -c '.failures // []' "$FILE")" + +if [[ "$OK" == "true" ]]; then + json_emit "$(jq -n --arg ts "$TS" --argjson count "$ENTRY_COUNT" '{ + version:"1.0.0", + rule_id:"ledger.hash_chain_intact", + control_ids:["AU-01","AU-02"], + passed:true, + severity:"CRITICAL", + timestamp:$ts, + evidence:[{path:"ledger_verify.json"}], + details:{entries_checked:$count} + }')" +else + json_emit "$(jq -n --arg ts "$TS" --argjson count 
"$ENTRY_COUNT" --argjson failures "$FAILURES_JSON" '{ + version:"1.0.0", + rule_id:"ledger.hash_chain_intact", + control_ids:["AU-01","AU-02"], + passed:false, + severity:"CRITICAL", + timestamp:$ts, + evidence:[{path:"ledger_verify.json"}], + details:{entries_checked:$count, failures:$failures} + }')" +fi diff --git a/README.md b/README.md index 105afb7..2b65f6d 100644 --- a/README.md +++ b/README.md @@ -21,14 +21,30 @@ vm-cc is the continuous compliance and evidence orchestration layer. It ingests - 90-automation/: pipelines/glue for end-to-end runs - scripts/: thin CLI wrappers to orchestrate collect → evaluate → report → sign +## Run directories +Each execution writes to a per-run folder set: +- 30-evidence/YYYY-MM-DD/RUN_ID/ +- 50-reports/YYYY-MM-DD/RUN_ID/ +- 60-proofs/YYYY-MM-DD/RUN_ID/ +- 70-violations/YYYY-MM-DD/RUN_ID/ + +RUN_ID format: `YYYYMMDDThhmmssZ_<rand8>`. + ## Rule result contract (example) ``` { - "rule_id": "authority-hierarchy", + "version": "1.0.0", + "rule_id": "ledger.hash_chain_intact", + "control_ids": ["AU-01", "AU-02"], "passed": true, - "severity": "HIGH", + "severity": "CRITICAL", "timestamp": "2025-12-27T12:00:00Z", - "evidence": ["30-evidence/2025-12-27/authority.json"], - "details": { "checked_transitions": 42 } + "evidence": [ + { + "path": "30-evidence/2025-12-27/20251227T120000Z_ab12/ledger_verify.json", + "sha256": "..." + } + ], + "details": { "entries_checked": 18231 } } ``` diff --git a/config/rules.yaml b/config/rules.yaml index 3d73ab9..43f1eb3 100644 --- a/config/rules.yaml +++ b/config/rules.yaml @@ -1,24 +1,23 @@ -# Rule registry. Each entry binds a rule id to a source and evaluation script. -# Keep rule definitions in 40-rules/; this file ties them to schedules and severity. 
+version: "1.0.0" rules: - - id: authority-hierarchy - severity: HIGH - entry: "40-rules/authority_hierarchy.yaml" + - rule_id: "ledger.hash_chain_intact" + severity: "CRITICAL" + script: "40-rules/ledger_hash_chain_intact.sh" evidence: - - vm_mcp - - vm_ledger + - "ledger_verify.json" + controls: ["AU-01", "AU-02"] - - id: skills-health - severity: MEDIUM - entry: "40-rules/skills_health.yaml" + - rule_id: "governance.constitution_pinned" + severity: "HIGH" + script: "40-rules/governance_constitution_pinned.sh" evidence: - - vm_skills - - ops + - "constitution_hash.json" + controls: ["GV-01"] - - id: receipts-schema - severity: HIGH - entry: "40-rules/receipts_schema.yaml" + - rule_id: "backup.restore_drill_recent" + severity: "MEDIUM" + script: "40-rules/backup_restore_drill_recent.sh" evidence: - - vm_contracts - - vm_ledger + - "backup_restore_drill.json" + controls: ["BC-01"] diff --git a/config/schedules.yaml b/config/schedules.yaml index 06c6ce9..9f41005 100644 --- a/config/schedules.yaml +++ b/config/schedules.yaml @@ -1,13 +1,10 @@ -# Schedules for rule execution. Cron-like or duration strings. 
+version: "1.0.0" schedules: - - name: hourly-critical - every: "1h" - rules: - - authority-hierarchy - - receipts-schema + - name: "hourly" + cadence: "0 * * * *" + run: ["collect", "evaluate", "report", "sign"] - - name: daily-health - every: "24h" - rules: - - skills-health + - name: "daily" + cadence: "0 20 * * *" + run: ["collect", "evaluate", "report", "sign", "anchor"] diff --git a/config/sources.yaml b/config/sources.yaml index 66229c7..21f7229 100644 --- a/config/sources.yaml +++ b/config/sources.yaml @@ -10,7 +10,7 @@ vm_skills: - "*/outputs/ROOT.txt" ops: - path: "../ops" + path: "../vm-ops" evidence: - "20-identity/**" - "60-backups/**" diff --git a/schemas/rule_result.schema.json b/schemas/rule_result.schema.json new file mode 100644 index 0000000..5265b33 --- /dev/null +++ b/schemas/rule_result.schema.json @@ -0,0 +1,38 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "vm-cc rule_result v1", + "type": "object", + "required": [ + "version", + "rule_id", + "passed", + "severity", + "timestamp", + "evidence", + "details" + ], + "properties": { + "version": { "type": "string" }, + "rule_id": { "type": "string" }, + "passed": { "type": "boolean" }, + "severity": { + "type": "string", + "enum": ["LOW", "MEDIUM", "HIGH", "CRITICAL"] + }, + "timestamp": { "type": "string" }, + "control_ids": { "type": "array", "items": { "type": "string" } }, + "evidence": { + "type": "array", + "items": { + "type": "object", + "required": ["path"], + "properties": { + "path": { "type": "string" }, + "sha256": { "type": "string" } + } + } + }, + "details": { "type": "object" }, + "remediation": { "type": ["object", "null"] } + } +} diff --git a/scripts/lib/common.sh b/scripts/lib/common.sh new file mode 100755 index 0000000..d3a6932 --- /dev/null +++ b/scripts/lib/common.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash +set -euo pipefail + +vmcc_root() { + cd "$(dirname "${BASH_SOURCE[0]}")/../.." 
&& pwd +} + +iso_utc_now() { + date -u "+%Y-%m-%dT%H:%M:%SZ" +} + +run_id() { + local ts + ts="$(date -u "+%Y%m%dT%H%M%SZ")" + local rnd + rnd="$(head -c 8 /dev/urandom | od -An -tx1 | tr -d ' \n')" + echo "${ts}_${rnd:0:8}" +} + +require_cmd() { + command -v "$1" >/dev/null 2>&1 || { echo "missing required tool: $1" >&2; exit 2; } +} + +json_emit() { + printf '%s\n' "$1" +} diff --git a/scripts/vmcc b/scripts/vmcc new file mode 100755 index 0000000..ecc25e8 --- /dev/null +++ b/scripts/vmcc @@ -0,0 +1,64 @@ +#!/usr/bin/env bash +set -euo pipefail + +source "$(dirname "$0")/lib/common.sh" + +ROOT="$(vmcc_root)" +CMD="${1:-}" +RUN_ID="${VMCC_RUN_ID:-$(run_id)}" +DAY="$(date -u "+%Y-%m-%d")" + +EVID_DIR="$ROOT/30-evidence/$DAY/$RUN_ID" +RULE_DIR="$ROOT/50-reports/$DAY/$RUN_ID/rules" +REP_DIR="$ROOT/50-reports/$DAY/$RUN_ID" + +mkdir -p "$EVID_DIR" "$RULE_DIR" "$REP_DIR" + +case "$CMD" in + collect) + echo "[vmcc] run_id=$RUN_ID" + echo "[vmcc] collecting evidence -> $EVID_DIR" + "$ROOT/20-collectors/collect_ledger_verify.sh" "$EVID_DIR" + "$ROOT/20-collectors/collect_constitution_hash.sh" "$EVID_DIR" + "$ROOT/20-collectors/collect_backup_restore_drill.sh" "$EVID_DIR" + ;; + evaluate) + echo "[vmcc] evaluating rules -> $RULE_DIR" + "$ROOT/40-rules/ledger_hash_chain_intact.sh" "$EVID_DIR" > "$RULE_DIR/ledger.hash_chain_intact.json" + "$ROOT/40-rules/governance_constitution_pinned.sh" "$EVID_DIR" > "$RULE_DIR/governance.constitution_pinned.json" + "$ROOT/40-rules/backup_restore_drill_recent.sh" "$EVID_DIR" > "$RULE_DIR/backup.restore_drill_recent.json" + ;; + report) + require_cmd jq + echo "[vmcc] assembling report -> $REP_DIR/report.json" + TS="$(iso_utc_now)" + PASSED_COUNT="$(jq -s '[.[] | select(.passed==true)] | length' "$RULE_DIR"/*.json)" + FAILED_COUNT="$(jq -s '[.[] | select(.passed==false)] | length' "$RULE_DIR"/*.json)" + + jq -n \ + --arg version "1.0.0" \ + --arg timestamp "$TS" \ + --arg run_id "$RUN_ID" \ + --arg day "$DAY" \ + --slurpfile rules <(cat 
"$RULE_DIR"/*.json) \ + --argjson passed "$PASSED_COUNT" \ + --argjson failed "$FAILED_COUNT" \ + '{ + version: $version, + timestamp: $timestamp, + period: "run", + run_id: $run_id, + day: $day, + summary: { + rules_passed: $passed, + rules_failed: $failed, + status: (if $failed == 0 then "COMPLIANT" else "NONCOMPLIANT" end) + }, + rules: $rules + }' > "$REP_DIR/report.json" + ;; + *) + echo "Usage: $0 {collect|evaluate|report}" >&2 + exit 1 + ;; +esac