Compare commits
2 Commits: c62ff092b7 ... main

| Author | SHA1 | Date |
|---|---|---|
|  | 5bf3becddb |  |
|  | 805a8a4617 |  |
collect_backup_restore_drill.sh

@@ -4,8 +4,36 @@ set -euo pipefail
 OUT_DIR="${1:?usage: collect_backup_restore_drill.sh <out_dir>}"
 mkdir -p "$OUT_DIR"
 
-ROOT="../vm-skills"
-LATEST="$(find "$ROOT" -type f -name "*restore*drill*.json" 2>/dev/null | sort | tail -n 1 || true)"
+DEFAULT_GLOB="*restore*drill*.json"
+DEFAULT_ROOTS=(
+  "../vm-ops/60-backups"
+  "../vm-skills"
+)
+
+matches=()
+if [[ -n "${VMCC_RESTORE_DRILL_GLOB:-}" ]]; then
+  if [[ -f "$VMCC_RESTORE_DRILL_GLOB" ]]; then
+    matches+=("$VMCC_RESTORE_DRILL_GLOB")
+  elif [[ -d "$VMCC_RESTORE_DRILL_GLOB" ]]; then
+    while IFS= read -r -d '' file; do
+      matches+=("$file")
+    done < <(find "$VMCC_RESTORE_DRILL_GLOB" -type f -name "$DEFAULT_GLOB" -print0 2>/dev/null)
+  else
+    while IFS= read -r file; do
+      matches+=("$file")
+    done < <(compgen -G "$VMCC_RESTORE_DRILL_GLOB" || true)
+  fi
+fi
+
+if [[ ${#matches[@]} -eq 0 ]]; then
+  for root in "${DEFAULT_ROOTS[@]}"; do
+    if [[ -d "$root" ]]; then
+      while IFS= read -r -d '' file; do
+        matches+=("$file")
+      done < <(find "$root" -type f -name "$DEFAULT_GLOB" -print0 2>/dev/null)
+    fi
+  done
+fi
 
 file_mtime_epoch() {
   local file="$1"

@@ -20,8 +48,11 @@ file_mtime_iso() {
   local file="$1"
   local mtime
   mtime="$(file_mtime_epoch "$file")"
-  if date -u -r "$file" "+%Y-%m-%dT%H:%M:%SZ" >/dev/null 2>&1; then
-    date -u -r "$file" "+%Y-%m-%dT%H:%M:%SZ"
+
+  # BSD/macOS: date -r <epoch>
+  if date -u -r "$mtime" "+%Y-%m-%dT%H:%M:%SZ" >/dev/null 2>&1; then
+    date -u -r "$mtime" "+%Y-%m-%dT%H:%M:%SZ"
+  # GNU: date -d "@<epoch>"
   elif date -u -d "@${mtime}" "+%Y-%m-%dT%H:%M:%SZ" >/dev/null 2>&1; then
     date -u -d "@${mtime}" "+%Y-%m-%dT%H:%M:%SZ"
   else

@@ -29,13 +60,26 @@ file_mtime_iso() {
   fi
 }
 
-if [[ -n "$LATEST" && -f "$LATEST" ]]; then
+LATEST=""
+LATEST_TS=0
+for file in "${matches[@]}"; do
+  if [[ -f "$file" ]]; then
+    ts="$(file_mtime_epoch "$file")"
+    if [[ "$ts" -gt "$LATEST_TS" || ( "$ts" -eq "$LATEST_TS" && ( -z "$LATEST" || "$file" > "$LATEST" ) ) ]]; then
+      LATEST_TS="$ts"
+      LATEST="$file"
+    fi
+  fi
+done
+
+if [[ -n "$LATEST" ]]; then
   TS="$(file_mtime_iso "$LATEST")"
-  cat > "$OUT_DIR/backup_restore_drill.json" <<JSON
-{"collected": true, "path": "$LATEST", "observed_at": "$TS"}
-JSON
+  jq -n \
+    --arg path "$LATEST" \
+    --arg ts "$TS" \
+    '{collected:true, path:$path, observed_at:$ts}' \
+    > "$OUT_DIR/backup_restore_drill.json"
 else
-  cat > "$OUT_DIR/backup_restore_drill.json" <<'JSON'
-{"collected": false, "reason": "no restore drill artifacts found"}
-JSON
+  jq -n '{collected:false, reason:"no restore drill artifacts found"}' \
+    > "$OUT_DIR/backup_restore_drill.json"
 fi
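A minimal usage sketch for the new collector; the output directory and glob value below are illustrative, not taken from this diff:

# Override discovery explicitly; otherwise DEFAULT_ROOTS are scanned.
VMCC_RESTORE_DRILL_GLOB="../vm-ops/60-backups/*restore*drill*.json" \
  ./collect_backup_restore_drill.sh /tmp/evidence

# The collector always writes backup_restore_drill.json, even when nothing is found.
jq . /tmp/evidence/backup_restore_drill.json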
collect_ledger_verify.sh

@@ -4,20 +4,51 @@ set -euo pipefail
 OUT_DIR="${1:?usage: collect_ledger_verify.sh <out_dir>}"
 mkdir -p "$OUT_DIR"
 
-if command -v ledger >/dev/null 2>&1; then
-  if ! ledger verify --format json > "$OUT_DIR/ledger_verify.json"; then
-    cat > "$OUT_DIR/ledger_verify.json" <<'JSON'
-{"collected": false, "reason": "ledger verify failed"}
-JSON
-  fi
-elif command -v ledger-cli >/dev/null 2>&1; then
-  if ! ledger-cli verify --format json > "$OUT_DIR/ledger_verify.json"; then
-    cat > "$OUT_DIR/ledger_verify.json" <<'JSON'
-{"collected": false, "reason": "ledger-cli verify failed"}
-JSON
-  fi
-else
+CANDIDATES=(
+  "ledger"
+  "ledger-cli"
+  "../vm-ledger/target/release/ledger"
+  "../vm-ledger/target/release/ledger-cli"
+  "../vm-ledger/target/debug/ledger"
+  "../vm-ledger/target/debug/ledger-cli"
+  "../vm-ledger/bin/ledger"
+  "../vm-ledger/bin/ledger-cli"
+  "../vm-ledger/ledger"
+  "../vm-ledger/ledger-cli"
+)
+
+LEDGER_BIN=""
+for c in "${CANDIDATES[@]}"; do
+  if [[ "$c" == "ledger" || "$c" == "ledger-cli" ]]; then
+    if command -v "$c" >/dev/null 2>&1; then
+      LEDGER_BIN="$c"
+      break
+    fi
+  else
+    if [[ -x "$c" ]]; then
+      LEDGER_BIN="$c"
+      break
+    fi
+  fi
+done
+
+LEDGER_DIR="${VMCC_LEDGER_DIR:-../vm-ledger/log}"
+if [[ ! -d "$LEDGER_DIR" ]]; then
+  cat > "$OUT_DIR/ledger_verify.json" <<JSON
+{"collected": false, "reason": "ledger dir not found: $LEDGER_DIR"}
+JSON
+  exit 0
+fi
+
+if [[ -z "$LEDGER_BIN" ]]; then
   cat > "$OUT_DIR/ledger_verify.json" <<'JSON'
 {"collected": false, "reason": "ledger CLI not found"}
 JSON
+  exit 0
+fi
+
+if ! "$LEDGER_BIN" verify --dir "$LEDGER_DIR" --format json > "$OUT_DIR/ledger_verify.json" 2> "$OUT_DIR/ledger_verify.stderr"; then
+  cat > "$OUT_DIR/ledger_verify.json" <<JSON
+{"collected": false, "reason": "$(basename "$LEDGER_BIN") verify failed"}
+JSON
 fi
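A usage sketch under the same assumptions; VMCC_LEDGER_DIR and the output directory are placeholder values:

# Point the collector at a ledger directory and an evidence output directory.
VMCC_LEDGER_DIR="../vm-ledger/log" ./collect_ledger_verify.sh /tmp/evidence

# On failure the reason is recorded in the JSON instead of aborting the run.
jq . /tmp/evidence/ledger_verify.json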
@@ -1,2 +1,2 @@
 version: "1.0.0"
-constitution_sha256: ""
+constitution_sha256: "23bd0cf58ddb19a2664ecaf093de724b71aa2fa330624906199c7acd807700ff"
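The pinned value is presumably the SHA-256 of the constitution document. A sketch of producing and spot-checking such a pin; the file name constitution.md is a placeholder, only the digest comes from this diff:

# Compute the digest to pin (file name is hypothetical).
sha256sum constitution.md | awk '{print $1}'

# Compare against the pinned value from the config change above.
[[ "$(sha256sum constitution.md | awk '{print $1}')" == "23bd0cf58ddb19a2664ecaf093de724b71aa2fa330624906199c7acd807700ff" ]] \
  && echo "pin matches"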
@@ -1,25 +1,24 @@
-# Define evidence sources and where collectors should look.
-# Keep paths external to code so collectors are layout-agnostic.
+version: "1.0.0"
 
-vm_skills:
-  path: "../vm-skills"
-  evidence:
-    - "*/outputs/status_matrix.json"
-    - "*/outputs/audit_report.md"
-    - "*/outputs/PROOF.json"
-    - "*/outputs/ROOT.txt"
-
 ops:
   path: "../vm-ops"
   evidence:
     - "20-identity/**"
-    - "60-backups/**"
-    - "70-audits/**"
+    - "60-backups/**/*"
+    - "70-audits/**/*"
 
 vm_ledger:
   path: "../vm-ledger"
   evidence:
     - "log/entries.cborseq"
+    - "target/release/ledger-cli"
+    - "target/debug/ledger-cli"
+
+vm_skills:
+  path: "../vm-skills"
+  evidence:
+    - "*/outputs/**/*restore*drill*.json"
+    - "*/outputs/**/*.json"
 
 vm_mcp:
   path: "../vm-mcp"
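A sketch of how a collector might expand one of these evidence globs, assuming bash with globstar; the patterns come from the config above, everything else is illustrative:

# Expand a recursive evidence pattern relative to the configured path.
shopt -s globstar nullglob
cd ../vm-ops
for f in 60-backups/**/*; do
  [[ -f "$f" ]] && echo "evidence candidate: $f"
done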
scripts/vmcc

@@ -24,30 +24,44 @@ hash_file() {
 }
 
 write_manifest() {
+  require_cmd jq
   local manifest="$EVID_DIR/manifest.json"
   local ts
   ts="$(iso_utc_now)"
-  {
-    echo "{"
-    echo " \"version\": \"1.0.0\","
-    echo " \"collected_at\": \"${ts}\","
-    echo " \"run_id\": \"${RUN_ID}\","
-    echo " \"files\": ["
-    local first=1
-    while IFS= read -r file; do
-      local rel
-      rel="${file#$ROOT/}"
-      local sha
-      sha="$(hash_file "$file")"
-      if [[ $first -eq 0 ]]; then
-        echo " ,"
-      fi
-      first=0
-      echo " {\"path\": \"${rel}\", \"sha256\": \"${sha}\"}"
-    done < <(find "$EVID_DIR" -type f ! -name "manifest.json" | sort)
-    echo " ]"
-    echo "}"
-  } > "$manifest"
+
+  if sort -z --version >/dev/null 2>&1; then
+    find "$EVID_DIR" -type f ! -name "manifest.json" -print0 \
+      | LC_ALL=C sort -z \
+      | while IFS= read -r -d '' file; do
+          local rel
+          rel="${file#$ROOT/}"
+          local sha
+          sha="$(hash_file "$file")"
+          jq -n --arg path "$rel" --arg sha "$sha" '{path:$path, sha256:$sha}'
+        done \
+      | jq -s --arg ts "$ts" --arg run "$RUN_ID" '{
+          version: "1.0.0",
+          collected_at: $ts,
+          run_id: $run,
+          files: .
+        }' > "$manifest"
+  else
+    find "$EVID_DIR" -type f ! -name "manifest.json" \
+      | LC_ALL=C sort \
+      | while IFS= read -r file; do
+          local rel
+          rel="${file#$ROOT/}"
+          local sha
+          sha="$(hash_file "$file")"
+          jq -n --arg path "$rel" --arg sha "$sha" '{path:$path, sha256:$sha}'
+        done \
+      | jq -s --arg ts "$ts" --arg run "$RUN_ID" '{
+          version: "1.0.0",
+          collected_at: $ts,
+          run_id: $run,
+          files: .
+        }' > "$manifest"
+  fi
 }
 
 run_collect() {
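The manifest now comes out of jq rather than hand-built echo statements, so it can be re-checked mechanically. A sketch, assuming sha256sum is available, the manifest path is illustrative, and the command is run from the directory the recorded paths are relative to:

# Re-hash every file listed in the manifest and compare against the recorded digests.
jq -r '.files[] | "\(.sha256)  \(.path)"' evidence/manifest.json | sha256sum -c -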
@@ -110,9 +124,7 @@ case "$CMD" in
     run_collect
     run_evaluate
     run_report
-    require_cmd jq
-    FAILED_COUNT="$(jq -s '[.[] | select(.passed==false)] | length' "$RULE_DIR"/*.json)"
-    if [[ "$FAILED_COUNT" -ne 0 ]]; then
+    if jq -e -s 'map(select(.passed == false)) | length > 0' "$RULE_DIR"/*.json >/dev/null; then
       exit 3
     fi
     ;;
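With the failure check rewritten as a single jq -e test, a caller can branch on the documented exit status. A sketch; the "run" subcommand name is inferred from the case dispatch and may differ:

# Run the full pipeline and surface rule failures distinctly from other errors.
scripts/vmcc run || rc=$?
if [[ "${rc:-0}" -eq 3 ]]; then
  echo "one or more rules reported passed:false" >&2
  exit 3
fi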