Initialize repository snapshot

This commit is contained in:
Vault Sovereign
2025-12-27 00:10:32 +00:00
commit 110d644e10
281 changed files with 40331 additions and 0 deletions

View File

@@ -0,0 +1,944 @@
#!/usr/bin/env python3
from __future__ import annotations
import argparse
import hashlib
import json
import shutil
from pathlib import Path
def _require_no_floats(value: object, *, path: str = "$") -> None:
if isinstance(value, float):
raise ValueError(f"float not allowed in canonical JSON at {path}")
if isinstance(value, dict):
for k, v in value.items():
_require_no_floats(v, path=f"{path}.{k}")
elif isinstance(value, list):
for i, v in enumerate(value):
_require_no_floats(v, path=f"{path}[{i}]")
def _canonical_json_bytes(obj: object) -> bytes:
    """Serialize *obj* to deterministic, float-free canonical JSON bytes."""
    _require_no_floats(obj)
    text = json.dumps(
        obj,
        sort_keys=True,
        separators=(",", ":"),
        ensure_ascii=False,
        allow_nan=False,
    )
    return text.encode("utf-8")
def _vmhash(data: bytes, *, hash_algo: str) -> str:
if hash_algo != "sha256":
raise ValueError("this fixture generator supports sha256 only")
return f"sha256:{hashlib.sha256(data).hexdigest()}"
def _hex_part(value: str) -> str:
return value.split(":", 1)[-1]
def _compute_merkle_root(leaves: list[str], *, hash_algo: str) -> str:
    """Fold leaf digests into a single Merkle root.

    An empty ledger hashes the literal b"empty"; a single leaf is its own
    root; an odd-sized level pairs its last node with itself.
    """
    if not leaves:
        return _vmhash(b"empty", hash_algo=hash_algo)
    level = list(leaves)
    while len(level) > 1:
        parents: list[str] = []
        for pos in range(0, len(level), 2):
            lhs = level[pos]
            rhs = level[pos + 1] if pos + 1 < len(level) else lhs
            combined = (_hex_part(lhs) + _hex_part(rhs)).encode("utf-8")
            parents.append(_vmhash(combined, hash_algo=hash_algo))
        level = parents
    return level[0]
def _op_digest(*, op: str, params: dict, hash_algo: str) -> str:
    """Digest of the canonical {"op", "params"} object for one event."""
    canonical = _canonical_json_bytes({"op": op, "params": params})
    return _vmhash(canonical, hash_algo=hash_algo)
def _event_hash(event_without_event_hash: dict, *, hash_algo: str) -> str:
    """Hash an event dict from which the "event_hash" key has been removed."""
    canonical = _canonical_json_bytes(event_without_event_hash)
    return _vmhash(canonical, hash_algo=hash_algo)
def _write_json(path: Path, obj: object, *, pretty: bool) -> None:
path.parent.mkdir(parents=True, exist_ok=True)
if pretty:
path.write_text(
json.dumps(obj, indent=2, sort_keys=True, ensure_ascii=False) + "\n",
encoding="utf-8",
)
return
path.write_text(
json.dumps(obj, sort_keys=True, separators=(",", ":"), ensure_ascii=False),
encoding="utf-8",
)
def _write_receipts_jsonl(path: Path, events: list[dict]) -> None:
path.parent.mkdir(parents=True, exist_ok=True)
lines = [
json.dumps(e, sort_keys=True, separators=(",", ":"), ensure_ascii=False)
for e in events
]
path.write_text("\n".join(lines) + "\n", encoding="utf-8")
def _write_roots_txt(path: Path, *, roots_by_seq: dict[int, str]) -> None:
path.parent.mkdir(parents=True, exist_ok=True)
lines = ["# Sentinel root history (seq -> merkle root)"]
for seq in sorted(roots_by_seq.keys()):
lines.append(f"seq={seq} root={roots_by_seq[seq]}")
path.write_text("\n".join(lines) + "\n", encoding="utf-8")
def _file_digest(path: Path, *, hash_algo: str) -> tuple[str, int]:
    """Return (prefixed digest, size in bytes) of the file at *path*."""
    blob = path.read_bytes()
    return _vmhash(blob, hash_algo=hash_algo), len(blob)
def _write_integrity_json(
    bundle_dir: Path,
    *,
    include_paths: list[str],
    hash_algo: str,
) -> None:
    """Write integrity.json listing digest + size for every path in *include_paths*.

    Entries are sorted by relative path for deterministic output.
    """
    entries: list[dict] = []
    for rel in include_paths:
        target = (bundle_dir / rel).resolve()
        digest, size = _file_digest(target, hash_algo=hash_algo)
        entries.append({"path": rel, "digest": digest, "size_bytes": size})
    entries.sort(key=lambda e: e["path"])
    payload = {
        "format": "vm-sentinel-integrity-v1",
        "hash_algo": hash_algo,
        "files": entries,
    }
    _write_json(bundle_dir / "integrity.json", payload, pretty=True)
def _flatten_events_for_verifier(events: list[dict], *, since_seq: int, until_seq: int) -> list[dict]:
by_seq: dict[int, list[dict]] = {}
for ev in events:
seq = ev.get("seq")
if isinstance(seq, int):
by_seq.setdefault(seq, []).append(ev)
ordered = [by_seq[s][0] for s in range(since_seq, until_seq + 1) if s in by_seq]
return ordered
def _build_bundle(
    bundle_dir: Path,
    *,
    events_in_file_order: list[dict],
    since_seq: int,
    until_seq: int,
    seal_id: str,
    instance_id: str,
    created_at: str,
    canonicalization_version: str,
    sentinel_version: str,
    schema_version: str,
    hash_algo: str,
    include_readme: bool = True,
    omit_roots_file: bool = False,
    extra_unlisted_file: bool = False,
    integrity_override: callable | None = None,  # NOTE(review): builtin `callable` used as an annotation; presumably Callable[[Path], None] — confirm
) -> None:
    """Materialize one conformance-fixture bundle directory.

    Mutates *events_in_file_order* in place (fills empty op_digest,
    prev_event_hash and event_hash fields), then writes receipts.jsonl,
    roots.txt (unless *omit_roots_file*), seal.json,
    verifier_manifest.json and integrity.json under *bundle_dir*.
    *integrity_override*, when given, runs last and may rewrite any of the
    generated files to produce a deliberately failing fixture.
    """
    # Start from a clean directory so stale files never leak between runs.
    if bundle_dir.exists():
        shutil.rmtree(bundle_dir)
    bundle_dir.mkdir(parents=True, exist_ok=True)
    if include_readme:
        (bundle_dir / "README.md").write_text(
            f"MERIDIAN v1 conformance fixture: {bundle_dir.name}\n", encoding="utf-8"
        )
    # Build event hashes in file order, but with prev_event_hash chaining defined by seq order.
    #
    # We set prev_event_hash for each event to the previous event hash in (seq, file-order) ordering.
    # This matches the verifier's deterministic sort-by-seq behavior for normal (unique seq) fixtures,
    # and produces stable behavior for duplicate-seq fixtures (extras may be dropped by the verifier).
    events_sorted = sorted(
        enumerate(events_in_file_order),
        key=lambda t: (int(t[1]["seq"]), t[0]),
    )
    # Fill op_digest for every event first (may already be overridden by caller).
    for _, ev in events_sorted:
        payload = ev.get("payload")
        if not isinstance(payload, dict):
            raise ValueError("payload must be an object")
        params = payload.get("params")
        if params is None:
            params = {}
        payload["params"] = params
        if not isinstance(params, dict):
            raise ValueError("payload.params must be an object")
        if not isinstance(ev.get("op"), str):
            raise ValueError("op must be a string")
        # Only compute op_digest when the caller left it empty; a pre-set
        # (possibly deliberately wrong) value is preserved.
        if not isinstance(ev.get("op_digest"), str) or not ev.get("op_digest"):
            ev["op_digest"] = _op_digest(op=ev["op"], params=params, hash_algo=hash_algo)
    # Fill prev_event_hash + event_hash.
    prev_hash = "0"
    for idx, (_, ev) in enumerate(events_sorted):
        if idx == 0:
            if int(ev["seq"]) != 0:
                raise ValueError("first event must have seq=0 for these fixtures")
            ev["prev_event_hash"] = "0"  # genesis sentinel value
        else:
            # As with op_digest, a pre-set non-empty prev_event_hash survives,
            # which is how the prev-hash-mismatch fixture is built.
            if not isinstance(ev.get("prev_event_hash"), str) or not ev.get("prev_event_hash"):
                ev["prev_event_hash"] = prev_hash
        # event_hash covers the whole event minus the event_hash field itself.
        ev_no_hash = dict(ev)
        ev_no_hash.pop("event_hash", None)
        ev["event_hash"] = _event_hash(ev_no_hash, hash_algo=hash_algo)
        prev_hash = ev["event_hash"]
    # Write receipts
    receipts_path = bundle_dir / "receipts.jsonl"
    _write_receipts_jsonl(receipts_path, events_in_file_order)
    # Seal + roots are computed over the verifier-flattened range view.
    flattened = _flatten_events_for_verifier(events_in_file_order, since_seq=since_seq, until_seq=until_seq)
    leaves = [ev["event_hash"] for ev in flattened]
    root_start = _vmhash(b"empty", hash_algo=hash_algo)  # root of the empty ledger
    root_end = _compute_merkle_root(leaves, hash_algo=hash_algo)
    # Root history for the declared range (genesis-range fixtures only).
    roots_by_seq: dict[int, str] = {}
    running: list[str] = []
    for ev in flattened:
        running.append(ev["event_hash"])
        roots_by_seq[int(ev["seq"])] = _compute_merkle_root(running, hash_algo=hash_algo)
    if not omit_roots_file:
        _write_roots_txt(bundle_dir / "roots.txt", roots_by_seq=roots_by_seq)
    seal = {
        "format": "vm-sentinel-seal-v1",
        "sentinel_version": sentinel_version,
        "schema_version": schema_version,
        "hash_algo": hash_algo,
        "canonicalization_version": canonicalization_version,
        "seal_id": seal_id,
        "created_at": created_at,
        "instance_id": instance_id,
        "ledger_type": "jsonl",
        "range": {
            "since_seq": since_seq,
            "until_seq": until_seq,
            "since_ts": str(events_in_file_order[0]["ts"]),
            "until_ts": str(events_in_file_order[-1]["ts"]),
        },
        "root": {"start": root_start, "end": root_end, "seq": until_seq},
        "files": {
            "receipts": "receipts.jsonl",
            "roots": "roots.txt",
            "integrity": "integrity.json",
            "verifier_manifest": "verifier_manifest.json",
        },
    }
    _write_json(bundle_dir / "seal.json", seal, pretty=False)
    verifier_manifest = {
        "format": "vm-sentinel-verifier-manifest-v1",
        "sentinel_version": sentinel_version,
        "schema_version": schema_version,
        "canonicalization_version": canonicalization_version,
        "hash_algo": hash_algo,
        "verifier": {"name": "vm_verify_sentinel_bundle.py", "version": "0.1.0"},
    }
    _write_json(bundle_dir / "verifier_manifest.json", verifier_manifest, pretty=False)
    if extra_unlisted_file:
        # Deliberately NOT listed in integrity.json (strict-mode fixtures).
        (bundle_dir / "UNLISTED.bin").write_bytes(b"unlisted")
    # Default integrity: cover all bundle files (including README.md if present).
    include_paths = ["receipts.jsonl", "seal.json", "verifier_manifest.json"]
    if not omit_roots_file:
        include_paths.append("roots.txt")
    if include_readme:
        include_paths.append("README.md")
    _write_integrity_json(bundle_dir, include_paths=include_paths, hash_algo=hash_algo)
    if integrity_override is not None:
        integrity_override(bundle_dir)
def _uuid(n: int) -> str:
return f"00000000-0000-4000-8000-{n:012d}"
def _base_boot_event(*, seq: int, event_id: int, actor: str, ts: str, trace_id: str) -> dict:
    """Construct the boot event skeleton shared by every fixture.

    op_digest, prev_event_hash and event_hash are left empty; _build_bundle
    fills them in during hashing.
    """
    boot_params = {
        "canonicalization_version": "sentinel-event-jcs-v1",
        "hash_algo": "sha256",
        "schema_version": "1.0.0",
        "sentinel_version": "0.1.0",
    }
    event = {
        "event_id": _uuid(event_id),
        "seq": seq,
        "ts": ts,
        "event_type": "boot_event",
        "actor": actor,
        "cap_hash": "none",
        "op": "sentinel.boot_event.v1",
        "op_digest": "",
        "result": "ok",
        "trace_id": trace_id,
        "prev_event_hash": "",
        "event_hash": "",
        "payload": {"params": boot_params},
    }
    return event
def main() -> int:
    """Regenerate every MERIDIAN v1 conformance fixture (pass + fail bundles).

    All inputs are fixed (timestamps, UUIDs, trace ids), so repeated runs
    produce byte-identical bundles.  Returns 0 on success; used as the
    process exit status by the __main__ guard.
    """
    p = argparse.ArgumentParser()
    p.add_argument(
        "--suite-dir",
        default=str(Path(__file__).resolve().parents[1]),
        help="Path to MERIDIAN_V1_CONFORMANCE_TEST_SUITE directory",
    )
    args = p.parse_args()
    suite_dir = Path(args.suite_dir).resolve()
    fixtures_dir = suite_dir / "fixtures"
    # Constants shared by every bundle in the suite.
    hash_algo = "sha256"
    canonicalization_version = "sentinel-event-jcs-v1"
    sentinel_version = "0.1.0"
    schema_version = "1.0.0"
    instance_id = "did:vm:meridian:gw:demo"
    pass_dir = fixtures_dir / "pass"
    fail_dir = fixtures_dir / "fail"
    # --- PASS fixtures ---
    # PASS: refusal proof (intent + shadow receipt)
    boot_trace = "11111111-1111-4111-8111-111111111111"
    cmd_trace = "22222222-2222-4222-8222-222222222222"
    refusal_events = [
        _base_boot_event(
            seq=0,
            event_id=1,
            actor=instance_id,
            ts="2025-03-17T03:17:40Z",
            trace_id=boot_trace,
        ),
        {
            "event_id": _uuid(2),
            "seq": 1,
            "ts": "2025-03-17T03:17:41Z",
            "event_type": "action_intent",
            "actor": "did:vm:operator:demo",
            "cap_hash": "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
            "op": "meridian.v1.plc.write",
            "op_digest": "",
            "result": "ok",
            "trace_id": cmd_trace,
            "prev_event_hash": "",
            "event_hash": "",
            "payload": {
                "kind": "command_requested",
                "params": {"plc": "plc:demo", "register": "R1", "value": "1"},
            },
        },
        {
            "event_id": _uuid(3),
            "seq": 2,
            "ts": "2025-03-17T03:17:42Z",
            "event_type": "shadow_receipt",
            "actor": instance_id,
            "cap_hash": "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
            "op": "meridian.v1.plc.write",
            "op_digest": "",
            "result": "deny",
            "trace_id": cmd_trace,
            "prev_event_hash": "",
            "event_hash": "",
            "payload": {
                "kind": "command_refused",
                "reason_code": "safety_interlock",
                "reason_text": "safety policy denied write",
                "would_have_done": {"op": "meridian.v1.plc.write"},
                "params": {"plc": "plc:demo", "register": "R1", "value": "1"},
            },
        },
    ]
    _build_bundle(
        pass_dir / "refusal_proof_pass",
        events_in_file_order=refusal_events,
        since_seq=0,
        until_seq=2,
        seal_id="mv1_refusal_proof_pass",
        instance_id=instance_id,
        created_at="2025-03-17T03:17:43Z",
        canonicalization_version=canonicalization_version,
        sentinel_version=sentinel_version,
        schema_version=schema_version,
        hash_algo=hash_algo,
    )
    # PASS: tamper signal recorded (no trace linkage semantics)
    tamper_trace = "33333333-3333-4333-8333-333333333333"
    tamper_events = [
        _base_boot_event(
            seq=0,
            event_id=101,
            actor=instance_id,
            ts="2025-03-17T03:17:40Z",
            trace_id=boot_trace,
        ),
        {
            "event_id": _uuid(102),
            "seq": 1,
            "ts": "2025-03-17T03:17:41Z",
            "event_type": "tamper_signal",
            "actor": instance_id,
            "cap_hash": "none",
            "op": "meridian.v1.tamper_signal",
            "op_digest": "",
            "result": "ok",
            "trace_id": tamper_trace,
            "prev_event_hash": "",
            "event_hash": "",
            "payload": {"kind": "tamper_signal", "params": {"signal": "case_open"}},
        },
    ]
    # NOTE: tamper_events is mutated in place by _build_bundle (hashes filled);
    # later fixtures that want an unhashed copy deep-copy it via json round-trip.
    _build_bundle(
        pass_dir / "tamper_signal_pass",
        events_in_file_order=tamper_events,
        since_seq=0,
        until_seq=1,
        seal_id="mv1_tamper_signal_pass",
        instance_id=instance_id,
        created_at="2025-03-17T03:17:42Z",
        canonicalization_version=canonicalization_version,
        sentinel_version=sentinel_version,
        schema_version=schema_version,
        hash_algo=hash_algo,
    )
    # --- FAIL fixtures ---
    # FAIL: silent denial (intent without outcome) => E_CHAIN_DISCONTINUITY (strict linkage)
    silent_events = [
        _base_boot_event(
            seq=0,
            event_id=201,
            actor=instance_id,
            ts="2025-03-17T03:17:40Z",
            trace_id=boot_trace,
        ),
        {
            "event_id": _uuid(202),
            "seq": 1,
            "ts": "2025-03-17T03:17:41Z",
            "event_type": "action_intent",
            "actor": "did:vm:operator:demo",
            "cap_hash": "sha256:bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
            "op": "meridian.v1.plc.write",
            "op_digest": "",
            "result": "ok",
            "trace_id": "22222222-2222-4222-8222-222222222223",
            "prev_event_hash": "",
            "event_hash": "",
            "payload": {"kind": "command_requested", "params": {"plc": "plc:demo", "register": "R2", "value": "1"}},
        },
    ]
    _build_bundle(
        fail_dir / "silent_denial_intent_without_outcome",
        events_in_file_order=silent_events,
        since_seq=0,
        until_seq=1,
        seal_id="mv1_silent_denial",
        instance_id=instance_id,
        created_at="2025-03-17T03:17:42Z",
        canonicalization_version=canonicalization_version,
        sentinel_version=sentinel_version,
        schema_version=schema_version,
        hash_algo=hash_algo,
    )
    # FAIL: execution without intent => E_CHAIN_DISCONTINUITY (strict linkage)
    exec_only_events = [
        _base_boot_event(
            seq=0,
            event_id=211,
            actor=instance_id,
            ts="2025-03-17T03:17:40Z",
            trace_id=boot_trace,
        ),
        {
            "event_id": _uuid(212),
            "seq": 1,
            "ts": "2025-03-17T03:17:41Z",
            "event_type": "action_executed",
            "actor": instance_id,
            "cap_hash": "sha256:cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc",
            "op": "meridian.v1.plc.write",
            "op_digest": "",
            "result": "ok",
            "trace_id": "99999999-9999-4999-8999-999999999999",
            "prev_event_hash": "",
            "event_hash": "",
            "payload": {"kind": "command_executed", "params": {"plc": "plc:demo", "register": "R9", "value": "1"}},
        },
    ]
    _build_bundle(
        fail_dir / "execution_without_intent",
        events_in_file_order=exec_only_events,
        since_seq=0,
        until_seq=1,
        seal_id="mv1_execution_without_intent",
        instance_id=instance_id,
        created_at="2025-03-17T03:17:42Z",
        canonicalization_version=canonicalization_version,
        sentinel_version=sentinel_version,
        schema_version=schema_version,
        hash_algo=hash_algo,
    )
    # FAIL: double outcome (executed + denied) => E_CHAIN_DISCONTINUITY (strict linkage)
    double_trace = "22222222-2222-4222-8222-222222222224"
    double_outcome_events = [
        _base_boot_event(
            seq=0,
            event_id=221,
            actor=instance_id,
            ts="2025-03-17T03:17:40Z",
            trace_id=boot_trace,
        ),
        {
            "event_id": _uuid(222),
            "seq": 1,
            "ts": "2025-03-17T03:17:41Z",
            "event_type": "action_intent",
            "actor": "did:vm:operator:demo",
            "cap_hash": "sha256:dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd",
            "op": "meridian.v1.plc.write",
            "op_digest": "",
            "result": "ok",
            "trace_id": double_trace,
            "prev_event_hash": "",
            "event_hash": "",
            "payload": {"kind": "command_requested", "params": {"plc": "plc:demo", "register": "R3", "value": "1"}},
        },
        {
            "event_id": _uuid(223),
            "seq": 2,
            "ts": "2025-03-17T03:17:42Z",
            "event_type": "action_executed",
            "actor": instance_id,
            "cap_hash": "sha256:dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd",
            "op": "meridian.v1.plc.write",
            "op_digest": "",
            "result": "ok",
            "trace_id": double_trace,
            "prev_event_hash": "",
            "event_hash": "",
            "payload": {"kind": "command_executed", "params": {"plc": "plc:demo", "register": "R3", "value": "1"}},
        },
        {
            "event_id": _uuid(224),
            "seq": 3,
            "ts": "2025-03-17T03:17:43Z",
            "event_type": "shadow_receipt",
            "actor": instance_id,
            "cap_hash": "sha256:dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd",
            "op": "meridian.v1.plc.write",
            "op_digest": "",
            "result": "deny",
            "trace_id": double_trace,
            "prev_event_hash": "",
            "event_hash": "",
            "payload": {"kind": "command_refused", "reason_code": "policy_denied", "params": {"plc": "plc:demo", "register": "R3", "value": "1"}},
        },
    ]
    _build_bundle(
        fail_dir / "double_outcome",
        events_in_file_order=double_outcome_events,
        since_seq=0,
        until_seq=3,
        seal_id="mv1_double_outcome",
        instance_id=instance_id,
        created_at="2025-03-17T03:17:44Z",
        canonicalization_version=canonicalization_version,
        sentinel_version=sentinel_version,
        schema_version=schema_version,
        hash_algo=hash_algo,
    )
    # FAIL: event_hash mismatch (tampered stored event_hash) => E_EVENT_HASH_MISMATCH
    def _tamper_event_hash(bundle: Path) -> None:
        # Overwrite the second event's stored hash, then re-list integrity so
        # only the event hash (not the file digest) is what the verifier flags.
        receipts = (bundle / "receipts.jsonl").read_text(encoding="utf-8").splitlines()
        objs = [json.loads(l) for l in receipts if l.strip()]
        objs[1]["event_hash"] = "sha256:" + ("0" * 64)
        _write_receipts_jsonl(bundle / "receipts.jsonl", objs)
        _write_integrity_json(
            bundle,
            include_paths=["receipts.jsonl", "seal.json", "verifier_manifest.json", "roots.txt", "README.md"],
            hash_algo=hash_algo,
        )
    _build_bundle(
        fail_dir / "event_hash_mismatch",
        events_in_file_order=tamper_events,
        since_seq=0,
        until_seq=1,
        seal_id="mv1_event_hash_mismatch",
        instance_id=instance_id,
        created_at="2025-03-17T03:17:42Z",
        canonicalization_version=canonicalization_version,
        sentinel_version=sentinel_version,
        schema_version=schema_version,
        hash_algo=hash_algo,
        integrity_override=_tamper_event_hash,
    )
    # FAIL: op_digest mismatch (tampered op_digest but consistent event_hash) => E_EVENT_HASH_MISMATCH
    op_digest_bad = "sha256:" + ("f" * 64)
    # json round-trip = cheap deep copy so tamper_events itself stays intact.
    op_digest_events = json.loads(json.dumps(tamper_events))
    # Force a wrong op_digest for the tamper_signal event; event_hash will be computed from that wrong value.
    op_digest_events[1]["op_digest"] = op_digest_bad
    _build_bundle(
        fail_dir / "op_digest_mismatch",
        events_in_file_order=op_digest_events,
        since_seq=0,
        until_seq=1,
        seal_id="mv1_op_digest_mismatch",
        instance_id=instance_id,
        created_at="2025-03-17T03:17:42Z",
        canonicalization_version=canonicalization_version,
        sentinel_version=sentinel_version,
        schema_version=schema_version,
        hash_algo=hash_algo,
    )
    # FAIL: prev_event_hash mismatch (tamper_signal prev hash wrong, but hashes updated) => E_CHAIN_DISCONTINUITY
    prev_bad_events = json.loads(json.dumps(tamper_events))
    prev_bad_events[1]["prev_event_hash"] = "sha256:" + ("1" * 64)
    _build_bundle(
        fail_dir / "prev_event_hash_mismatch",
        events_in_file_order=prev_bad_events,
        since_seq=0,
        until_seq=1,
        seal_id="mv1_prev_hash_mismatch",
        instance_id=instance_id,
        created_at="2025-03-17T03:17:42Z",
        canonicalization_version=canonicalization_version,
        sentinel_version=sentinel_version,
        schema_version=schema_version,
        hash_algo=hash_algo,
    )
    # FAIL: seq non-monotonic (duplicate seq) => E_SEQ_NON_MONOTONIC
    dup_events = [
        _base_boot_event(
            seq=0,
            event_id=301,
            actor=instance_id,
            ts="2025-03-17T03:17:40Z",
            trace_id=boot_trace,
        ),
        {
            "event_id": _uuid(302),
            "seq": 1,
            "ts": "2025-03-17T03:17:41Z",
            "event_type": "health_event",
            "actor": instance_id,
            "cap_hash": "none",
            "op": "meridian.v1.health",
            "op_digest": "",
            "result": "ok",
            "trace_id": "aaaaaaaa-aaaa-4aaa-8aaa-aaaaaaaaaaaa",
            "prev_event_hash": "",
            "event_hash": "",
            "payload": {"kind": "health_event", "params": {"ok": True}},
        },
        {
            "event_id": _uuid(303),
            "seq": 1,
            "ts": "2025-03-17T03:17:41Z",
            "event_type": "corruption_detected",
            "actor": instance_id,
            "cap_hash": "none",
            "op": "meridian.v1.corruption_detected",
            "op_digest": "",
            "result": "error",
            "trace_id": "bbbbbbbb-bbbb-4bbb-8bbb-bbbbbbbbbbbb",
            "prev_event_hash": "",
            "event_hash": "",
            "payload": {"kind": "corruption_detected", "params": {"component": "storage"}},
        },
    ]
    _build_bundle(
        fail_dir / "seq_non_monotonic_duplicate",
        events_in_file_order=dup_events,
        since_seq=0,
        until_seq=1,
        seal_id="mv1_seq_non_monotonic",
        instance_id=instance_id,
        created_at="2025-03-17T03:17:42Z",
        canonicalization_version=canonicalization_version,
        sentinel_version=sentinel_version,
        schema_version=schema_version,
        hash_algo=hash_algo,
    )
    # FAIL: revoked capability used after revoke => E_REVOKED_CAPABILITY_USED
    revoked = "sha256:eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee"
    revoke_trace = "44444444-4444-4444-8444-444444444444"
    revoked_events = [
        _base_boot_event(
            seq=0,
            event_id=401,
            actor=instance_id,
            ts="2025-03-17T03:17:40Z",
            trace_id=boot_trace,
        ),
        {
            "event_id": _uuid(402),
            "seq": 1,
            "ts": "2025-03-17T03:17:41Z",
            "event_type": "cap_revoke",
            "actor": "did:vm:guardian:demo",
            "cap_hash": "none",
            "op": "meridian.v1.cap.revoke",
            "op_digest": "",
            "result": "ok",
            "trace_id": "55555555-5555-4555-8555-555555555555",
            "prev_event_hash": "",
            "event_hash": "",
            "payload": {"params": {"revoked_cap_hash": revoked}},
        },
        {
            "event_id": _uuid(403),
            "seq": 2,
            "ts": "2025-03-17T03:17:42Z",
            "event_type": "action_intent",
            "actor": "did:vm:operator:demo",
            "cap_hash": revoked,
            "op": "meridian.v1.plc.write",
            "op_digest": "",
            "result": "ok",
            "trace_id": revoke_trace,
            "prev_event_hash": "",
            "event_hash": "",
            "payload": {"kind": "command_requested", "params": {"plc": "plc:demo", "register": "R7", "value": "1"}},
        },
        {
            "event_id": _uuid(404),
            "seq": 3,
            "ts": "2025-03-17T03:17:43Z",
            "event_type": "action_executed",
            "actor": instance_id,
            "cap_hash": revoked,
            "op": "meridian.v1.plc.write",
            "op_digest": "",
            "result": "ok",
            "trace_id": revoke_trace,
            "prev_event_hash": "",
            "event_hash": "",
            "payload": {"kind": "command_executed", "params": {"plc": "plc:demo", "register": "R7", "value": "1"}},
        },
    ]
    _build_bundle(
        fail_dir / "revoked_capability_used",
        events_in_file_order=revoked_events,
        since_seq=0,
        until_seq=3,
        seal_id="mv1_revoked_capability_used",
        instance_id=instance_id,
        created_at="2025-03-17T03:17:44Z",
        canonicalization_version=canonicalization_version,
        sentinel_version=sentinel_version,
        schema_version=schema_version,
        hash_algo=hash_algo,
    )
    # FAIL: missing required file (roots.txt) => E_MISSING_REQUIRED_FILE
    _build_bundle(
        fail_dir / "missing_required_file_roots",
        events_in_file_order=tamper_events,
        since_seq=0,
        until_seq=1,
        seal_id="mv1_missing_roots",
        instance_id=instance_id,
        created_at="2025-03-17T03:17:42Z",
        canonicalization_version=canonicalization_version,
        sentinel_version=sentinel_version,
        schema_version=schema_version,
        hash_algo=hash_algo,
        omit_roots_file=True,
    )
    # FAIL: integrity digest mismatch => E_MANIFEST_HASH_MISMATCH
    def _wrong_digest(bundle: Path) -> None:
        # Corrupt only the listed digest for receipts.jsonl; file contents stay valid.
        integrity = json.loads((bundle / "integrity.json").read_text(encoding="utf-8"))
        for entry in integrity.get("files") or []:
            if entry.get("path") == "receipts.jsonl":
                entry["digest"] = "sha256:" + ("2" * 64)
        _write_json(bundle / "integrity.json", integrity, pretty=True)
    _build_bundle(
        fail_dir / "manifest_hash_mismatch",
        events_in_file_order=tamper_events,
        since_seq=0,
        until_seq=1,
        seal_id="mv1_manifest_hash_mismatch",
        instance_id=instance_id,
        created_at="2025-03-17T03:17:42Z",
        canonicalization_version=canonicalization_version,
        sentinel_version=sentinel_version,
        schema_version=schema_version,
        hash_algo=hash_algo,
        integrity_override=_wrong_digest,
    )
    # FAIL: receipts JSONL cannot be parsed => E_SCHEMA_INVALID
    def _invalid_jsonl(bundle: Path) -> None:
        # Replace receipts with truncated JSON, then recompute integrity so
        # the parse error (not a digest mismatch) is what the verifier hits.
        (bundle / "receipts.jsonl").write_text('{"truncated":\n', encoding="utf-8")
        _write_integrity_json(
            bundle,
            include_paths=["receipts.jsonl", "seal.json", "verifier_manifest.json", "roots.txt", "README.md"],
            hash_algo=hash_algo,
        )
    _build_bundle(
        fail_dir / "invalid_jsonl_truncated",
        events_in_file_order=tamper_events,
        since_seq=0,
        until_seq=1,
        seal_id="mv1_invalid_jsonl_truncated",
        instance_id=instance_id,
        created_at="2025-03-17T03:17:42Z",
        canonicalization_version=canonicalization_version,
        sentinel_version=sentinel_version,
        schema_version=schema_version,
        hash_algo=hash_algo,
        integrity_override=_invalid_jsonl,
    )
    # FAIL: strict mode forbids unlisted files => E_SCHEMA_INVALID
    _build_bundle(
        fail_dir / "unlisted_extra_file_strict",
        events_in_file_order=tamper_events,
        since_seq=0,
        until_seq=1,
        seal_id="mv1_unlisted_file",
        instance_id=instance_id,
        created_at="2025-03-17T03:17:42Z",
        canonicalization_version=canonicalization_version,
        sentinel_version=sentinel_version,
        schema_version=schema_version,
        hash_algo=hash_algo,
        extra_unlisted_file=True,
    )
    # FAIL: root mismatch => E_ROOT_MISMATCH
    def _tamper_root(bundle: Path) -> None:
        # Tamper the end root in both seal.json and roots.txt, then recompute
        # integrity so only the root comparison fails.
        seal = json.loads((bundle / "seal.json").read_text(encoding="utf-8"))
        bad = "sha256:" + ("3" * 64)
        seal["root"]["end"] = bad
        _write_json(bundle / "seal.json", seal, pretty=False)
        roots_txt = (bundle / "roots.txt").read_text(encoding="utf-8").splitlines()
        roots_txt = [ln if not ln.startswith("seq=1 ") else f"seq=1 root={bad}" for ln in roots_txt]
        (bundle / "roots.txt").write_text("\n".join(roots_txt) + "\n", encoding="utf-8")
        _write_integrity_json(
            bundle,
            include_paths=["receipts.jsonl", "seal.json", "verifier_manifest.json", "roots.txt", "README.md"],
            hash_algo=hash_algo,
        )
    _build_bundle(
        fail_dir / "root_mismatch",
        events_in_file_order=tamper_events,
        since_seq=0,
        until_seq=1,
        seal_id="mv1_root_mismatch",
        instance_id=instance_id,
        created_at="2025-03-17T03:17:42Z",
        canonicalization_version=canonicalization_version,
        sentinel_version=sentinel_version,
        schema_version=schema_version,
        hash_algo=hash_algo,
        integrity_override=_tamper_root,
    )
    # FAIL: range mismatch => E_RANGE_MISMATCH
    def _tamper_range(bundle: Path) -> None:
        # Shrink the declared range in seal.json (until_seq 1 -> 0).
        seal = json.loads((bundle / "seal.json").read_text(encoding="utf-8"))
        seal["range"]["until_seq"] = 0
        seal["root"]["seq"] = 0
        _write_json(bundle / "seal.json", seal, pretty=False)
        _write_integrity_json(
            bundle,
            include_paths=["receipts.jsonl", "seal.json", "verifier_manifest.json", "roots.txt", "README.md"],
            hash_algo=hash_algo,
        )
    _build_bundle(
        fail_dir / "range_mismatch",
        events_in_file_order=tamper_events,
        since_seq=0,
        until_seq=1,
        seal_id="mv1_range_mismatch",
        instance_id=instance_id,
        created_at="2025-03-17T03:17:42Z",
        canonicalization_version=canonicalization_version,
        sentinel_version=sentinel_version,
        schema_version=schema_version,
        hash_algo=hash_algo,
        integrity_override=_tamper_range,
    )
    # FAIL: canonicalization version unsupported => E_CANON_VERSION_UNSUPPORTED
    def _tamper_canon_version(bundle: Path) -> None:
        # Declare an unknown canonicalization version in both metadata files.
        seal = json.loads((bundle / "seal.json").read_text(encoding="utf-8"))
        vm = json.loads((bundle / "verifier_manifest.json").read_text(encoding="utf-8"))
        seal["canonicalization_version"] = "sentinel-event-jcs-v999"
        vm["canonicalization_version"] = "sentinel-event-jcs-v999"
        _write_json(bundle / "seal.json", seal, pretty=False)
        _write_json(bundle / "verifier_manifest.json", vm, pretty=False)
        _write_integrity_json(
            bundle,
            include_paths=["receipts.jsonl", "seal.json", "verifier_manifest.json", "roots.txt", "README.md"],
            hash_algo=hash_algo,
        )
    _build_bundle(
        fail_dir / "canon_version_unsupported",
        events_in_file_order=tamper_events,
        since_seq=0,
        until_seq=1,
        seal_id="mv1_canon_version_unsupported",
        instance_id=instance_id,
        created_at="2025-03-17T03:17:42Z",
        canonicalization_version=canonicalization_version,
        sentinel_version=sentinel_version,
        schema_version=schema_version,
        hash_algo=hash_algo,
        integrity_override=_tamper_canon_version,
    )
    print(f"[OK] Fixtures regenerated under {fixtures_dir}")
    return 0
if __name__ == "__main__":
    # Propagate main()'s integer status to the shell via SystemExit.
    exit_code = main()
    raise SystemExit(exit_code)