Initialize repository snapshot
0    cli/__init__.py    Normal file
667  cli/ledger.py      Normal file
@@ -0,0 +1,667 @@
from __future__ import annotations

import argparse
import hashlib
import json
import sqlite3
import uuid
from contextlib import contextmanager
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Any

from ledger.db import (
    default_db_path,
    ensure_migrated,
    log_proof_artifact,
    new_trace_id,
    open_db,
)


def _parse_since(value: str | None) -> str | None:
    if not value:
        return None

    v = value.strip()
    if not v:
        return None

    parts = v.split()
    if len(parts) == 2 and parts[0].isdigit():
        n = int(parts[0])
        unit = parts[1].lower()
        if unit in {"day", "days"}:
            dt = datetime.now(timezone.utc) - timedelta(days=n)
            return dt.replace(microsecond=0).isoformat().replace("+00:00", "Z")
        if unit in {"hour", "hours"}:
            dt = datetime.now(timezone.utc) - timedelta(hours=n)
            return dt.replace(microsecond=0).isoformat().replace("+00:00", "Z")
        if unit in {"minute", "minutes"}:
            dt = datetime.now(timezone.utc) - timedelta(minutes=n)
            return dt.replace(microsecond=0).isoformat().replace("+00:00", "Z")

    # Accept "YYYY-MM-DD HH:MM:SS" or ISO8601
    try:
        dt = datetime.fromisoformat(v.replace("Z", "+00:00"))
        if dt.tzinfo is None:
            dt = dt.replace(tzinfo=timezone.utc)
        return (
            dt.astimezone(timezone.utc)
            .replace(microsecond=0)
            .isoformat()
            .replace("+00:00", "Z")
        )
    except Exception:
        return v
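
# Illustrative values for --since (a sketch derived from the parsing branches above;
# the concrete strings are assumed examples, not taken from the original source):
#   "7 days"               -> UTC now minus 7 days, rendered as "...Z"
#   "12 hours"             -> UTC now minus 12 hours
#   "2025-12-17 00:00:00"  -> parsed via fromisoformat(), assumed UTC when naive
# Anything that fails to parse is passed through unchanged.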


def _fmt_ms(ms: int | None) -> str:
    return f"{ms}ms" if ms is not None else "-"


def _fmt_ts(ts: str) -> str:
    # Stored as ISO Z; keep as-is if it already looks like it.
    if ts.endswith("Z") and "T" in ts:
        return ts
    try:
        # sqlite datetime('now') is "YYYY-MM-DD HH:MM:SS"
        dt = datetime.strptime(ts, "%Y-%m-%d %H:%M:%S").replace(tzinfo=timezone.utc)
        return dt.replace(microsecond=0).isoformat().replace("+00:00", "Z")
    except Exception:
        return ts


def _one_line(value: str | None, *, max_len: int = 120) -> str | None:
    if not value:
        return None
    line = value.splitlines()[0].strip()
    if len(line) <= max_len:
        return line
    return line[:max_len] + "..."


@contextmanager
def _open(db_path: str | None):
    with open_db(Path(db_path) if db_path else None) as conn:
        ensure_migrated(conn)
        yield conn


def cmd_last(args: argparse.Namespace) -> int:
    with _open(args.db) as conn:
        rows = conn.execute(
            """
            SELECT ts, status, duration_ms, trace_id, kind, label, id, error_text
            FROM (
                SELECT ts, status, duration_ms, trace_id,
                       'tool' AS kind,
                       tool_name || COALESCE('.' || action, '') AS label,
                       id,
                       error_text
                FROM tool_invocations
                UNION ALL
                SELECT ts, status, duration_ms, trace_id,
                       'mcp' AS kind,
                       'mcp:' || server_name || '.' || method AS label,
                       id,
                       error_text
                FROM mcp_calls
                UNION ALL
                SELECT ts, 'ok' AS status, NULL AS duration_ms, trace_id,
                       'artifact' AS kind,
                       'artifact:' || kind || COALESCE(' ' || path, '') AS label,
                       id,
                       NULL AS error_text
                FROM proof_artifacts
            )
            ORDER BY ts DESC
            LIMIT ?;
            """,
            (args.n,),
        ).fetchall()

    for row in rows:
        ts = _fmt_ts(row["ts"])
        status = row["status"]
        label = row["label"]
        duration = _fmt_ms(row["duration_ms"])
        trace_id = row["trace_id"]
        err = _one_line(row["error_text"])
        tail = []
        if trace_id:
            tail.append(f"trace={trace_id}")
        if err and status != "ok":
            tail.append(err)
        tail_s = (" " + " ".join(tail)) if tail else ""
        print(f"{ts} {status:<5} {label:<28} {duration:>6}{tail_s}")
    return 0
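
# An assumed example of the one-line format printed above (values are illustrative):
#   2025-12-17T10:32:05Z ok    shell.run                     142ms trace=7f3c...
# Columns: timestamp, status (width 5), label (width 28), duration, then an optional
# trace id and a single-line error for non-ok rows.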


def cmd_trace(args: argparse.Namespace) -> int:
    with _open(args.db) as conn:
        rows = conn.execute(
            """
            SELECT ts, status, duration_ms, trace_id, kind, label, id, error_text
            FROM (
                SELECT ts, status, duration_ms, trace_id,
                       'tool' AS kind,
                       tool_name || COALESCE('.' || action, '') AS label,
                       id,
                       error_text
                FROM tool_invocations
                WHERE trace_id = ?
                UNION ALL
                SELECT ts, status, duration_ms, trace_id,
                       'mcp' AS kind,
                       'mcp:' || server_name || '.' || method AS label,
                       id,
                       error_text
                FROM mcp_calls
                WHERE trace_id = ?
                UNION ALL
                SELECT ts, 'ok' AS status, NULL AS duration_ms, trace_id,
                       'artifact' AS kind,
                       'artifact:' || kind || COALESCE(' ' || path, '') AS label,
                       id,
                       NULL AS error_text
                FROM proof_artifacts
                WHERE trace_id = ?
            )
            ORDER BY ts ASC;
            """,
            (args.trace_id, args.trace_id, args.trace_id),
        ).fetchall()

    for row in rows:
        ts = _fmt_ts(row["ts"])
        status = row["status"]
        label = row["label"]
        duration = _fmt_ms(row["duration_ms"])
        err = _one_line(row["error_text"])
        tail = f" {err}" if err and status != "ok" else ""
        print(f"{ts} {status:<5} {label:<28} {duration:>6}{tail}")
    return 0


def cmd_tool(args: argparse.Namespace) -> int:
    since = _parse_since(args.since)
    params: list[Any] = [args.tool_name]
    where = "tool_name = ?"
    if since:
        where += " AND ts >= ?"
        params.append(since)

    limit_sql = " LIMIT ?" if args.n is not None else ""
    if args.n is not None:
        params.append(args.n)

    with _open(args.db) as conn:
        rows = conn.execute(
            f"""
            SELECT ts, status, duration_ms, trace_id, tool_name, action, error_text
            FROM tool_invocations
            WHERE {where}
            ORDER BY ts DESC{limit_sql};
            """,
            tuple(params),
        ).fetchall()

    for row in rows:
        ts = _fmt_ts(row["ts"])
        status = row["status"]
        label = row["tool_name"] + (("." + row["action"]) if row["action"] else "")
        duration = _fmt_ms(row["duration_ms"])
        trace_id = row["trace_id"]
        err = _one_line(row["error_text"])
        tail = []
        if trace_id:
            tail.append(f"trace={trace_id}")
        if err and status != "ok":
            tail.append(err)
        tail_s = (" " + " ".join(tail)) if tail else ""
        print(f"{ts} {status:<5} {label:<28} {duration:>6}{tail_s}")
    return 0


def cmd_errors(args: argparse.Namespace) -> int:
    since = _parse_since(args.since)
    params: list[Any] = []
    where = "status != 'ok'"
    if since:
        where += " AND ts >= ?"
        params.append(since)

    with _open(args.db) as conn:
        rows = conn.execute(
            f"""
            SELECT ts, status, duration_ms, trace_id,
                   tool_name || COALESCE('.' || action, '') AS label,
                   error_text
            FROM tool_invocations
            WHERE {where}
            ORDER BY ts DESC
            LIMIT ?;
            """,
            (*params, args.n),
        ).fetchall()

    for row in rows:
        ts = _fmt_ts(row["ts"])
        status = row["status"]
        duration = _fmt_ms(row["duration_ms"])
        trace_id = row["trace_id"]
        label = row["label"]
        err = _one_line(row["error_text"]) or "-"
        tail = f" trace={trace_id}" if trace_id else ""
        print(f"{ts} {status:<5} {label:<28} {duration:>6} {err}{tail}")
    return 0


def cmd_search(args: argparse.Namespace) -> int:
    term = args.term.strip()
    if not term:
        return 2

    like = f"%{term}%"
    with _open(args.db) as conn:
        rows = conn.execute(
            """
            SELECT ts, status, duration_ms, trace_id,
                   tool_name || COALESCE('.' || action, '') AS label,
                   id
            FROM tool_invocations
            WHERE input_json LIKE ? OR output_json LIKE ? OR input_meta_json LIKE ? OR output_meta_json LIKE ?
               OR error_text LIKE ?
            ORDER BY ts DESC
            LIMIT ?;
            """,
            (like, like, like, like, like, args.n),
        ).fetchall()

    for row in rows:
        ts = _fmt_ts(row["ts"])
        status = row["status"]
        duration = _fmt_ms(row["duration_ms"])
        trace_id = row["trace_id"]
        label = row["label"]
        rec_id = row["id"]
        tail = [f"id={rec_id}"]
        if trace_id:
            tail.append(f"trace={trace_id}")
        print(f"{ts} {status:<5} {label:<28} {duration:>6} " + " ".join(tail))
    return 0


def _sha256_hex_text(value: str | None) -> str:
    b = (value or "").encode("utf-8")
    return hashlib.sha256(b).hexdigest()


def _canon(value: Any) -> str:
    if value is None:
        return ""
    if isinstance(value, (int,)):
        return str(value)
    if isinstance(value, float):
        return format(value, ".17g")
    return str(value)


def _digest_update(hasher: "hashlib._Hash", line: str) -> None:
    hasher.update(line.encode("utf-8"))
    hasher.update(b"\n")


def _seal_query_window(
    args: argparse.Namespace, conn: sqlite3.Connection
) -> tuple[dict[str, Any], str]:
    since = _parse_since(args.since)
    until = _parse_since(args.until) if getattr(args, "until", None) else None
    trace_ids: list[str] = [t.strip() for t in (args.trace_id or []) if t and t.strip()]
    scope = "trace_set" if trace_ids else "time_window"

    def where_ts(prefix: str) -> tuple[str, list[Any]]:
        clauses: list[str] = []
        params: list[Any] = []
        if since:
            clauses.append(f"datetime({prefix}ts) >= datetime(?)")
            params.append(since)
        if until:
            clauses.append(f"datetime({prefix}ts) <= datetime(?)")
            params.append(until)
        if trace_ids:
            marks = ",".join(["?"] * len(trace_ids))
            clauses.append(f"{prefix}trace_id IN ({marks})")
            params.extend(trace_ids)
        if not clauses:
            return ("1=1", [])
        return (" AND ".join(clauses), params)

    hasher = hashlib.sha256()
    counts: dict[str, int] = {}
    bounds_min: str | None = None
    bounds_max: str | None = None

    def note_ts(ts_value: Any) -> None:
        nonlocal bounds_min, bounds_max
        ts = _fmt_ts(_canon(ts_value))
        if not ts:
            return
        if bounds_min is None or ts < bounds_min:
            bounds_min = ts
        if bounds_max is None or ts > bounds_max:
            bounds_max = ts

    # tool_invocations
    where, params = where_ts("")
    rows = conn.execute(
        f"""
        SELECT id, ts, tool_name, action, status, duration_ms, trace_id,
               input_json, output_json, error_text
        FROM tool_invocations
        WHERE {where}
        ORDER BY datetime(ts) ASC, id ASC;
        """,
        tuple(params),
    ).fetchall()
    for r in rows:
        note_ts(r["ts"])
        line = (
            "tool_invocations"
            f"|id={_canon(r['id'])}"
            f"|ts={_fmt_ts(_canon(r['ts']))}"
            f"|tool_name={_canon(r['tool_name'])}"
            f"|action={_canon(r['action'])}"
            f"|status={_canon(r['status'])}"
            f"|duration_ms={_canon(r['duration_ms'])}"
            f"|trace_id={_canon(r['trace_id'])}"
            f"|input_sha256={_sha256_hex_text(r['input_json'])}"
            f"|output_sha256={_sha256_hex_text(r['output_json'])}"
            f"|error_sha256={_sha256_hex_text(r['error_text'])}"
        )
        _digest_update(hasher, line)
    counts["tool_invocations"] = len(rows)

    # mcp_calls
    where, params = where_ts("")
    rows = conn.execute(
        f"""
        SELECT id, ts, server_name, method, tool_name, status, duration_ms, trace_id,
               request_json, response_json, error_text
        FROM mcp_calls
        WHERE {where}
        ORDER BY datetime(ts) ASC, id ASC;
        """,
        tuple(params),
    ).fetchall()
    for r in rows:
        note_ts(r["ts"])
        line = (
            "mcp_calls"
            f"|id={_canon(r['id'])}"
            f"|ts={_fmt_ts(_canon(r['ts']))}"
            f"|server_name={_canon(r['server_name'])}"
            f"|method={_canon(r['method'])}"
            f"|tool_name={_canon(r['tool_name'])}"
            f"|status={_canon(r['status'])}"
            f"|duration_ms={_canon(r['duration_ms'])}"
            f"|trace_id={_canon(r['trace_id'])}"
            f"|request_sha256={_sha256_hex_text(r['request_json'])}"
            f"|response_sha256={_sha256_hex_text(r['response_json'])}"
            f"|error_sha256={_sha256_hex_text(r['error_text'])}"
        )
        _digest_update(hasher, line)
    counts["mcp_calls"] = len(rows)

    # proof_artifacts
    where, params = where_ts("")
    rows = conn.execute(
        f"""
        SELECT id, ts, kind, path, sha256_hex, blake3_hex, size_bytes, trace_id, meta_json
        FROM proof_artifacts
        WHERE {where}
        ORDER BY datetime(ts) ASC, id ASC;
        """,
        tuple(params),
    ).fetchall()
    for r in rows:
        note_ts(r["ts"])
        line = (
            "proof_artifacts"
            f"|id={_canon(r['id'])}"
            f"|ts={_fmt_ts(_canon(r['ts']))}"
            f"|kind={_canon(r['kind'])}"
            f"|path={_canon(r['path'])}"
            f"|sha256_hex={_canon(r['sha256_hex'])}"
            f"|blake3_hex={_canon(r['blake3_hex'])}"
            f"|size_bytes={_canon(r['size_bytes'])}"
            f"|trace_id={_canon(r['trace_id'])}"
            f"|meta_sha256={_sha256_hex_text(r['meta_json'])}"
        )
        _digest_update(hasher, line)
    counts["proof_artifacts"] = len(rows)

    # shadow_receipts (if present)
    try:
        where, params = where_ts("")
        rows = conn.execute(
            f"""
            SELECT id, ts, horizon_id, counterfactual_hash, entropy_delta, reason_unrealized,
                   observer_signature, trace_id, meta_json
            FROM shadow_receipts
            WHERE {where}
            ORDER BY datetime(ts) ASC, id ASC;
            """,
            tuple(params),
        ).fetchall()
        for r in rows:
            note_ts(r["ts"])
            line = (
                "shadow_receipts"
                f"|id={_canon(r['id'])}"
                f"|ts={_fmt_ts(_canon(r['ts']))}"
                f"|horizon_id={_canon(r['horizon_id'])}"
                f"|counterfactual_hash={_canon(r['counterfactual_hash'])}"
                f"|entropy_delta={_canon(r['entropy_delta'])}"
                f"|reason_unrealized={_canon(r['reason_unrealized'])}"
                f"|observer_signature={_canon(r['observer_signature'])}"
                f"|trace_id={_canon(r['trace_id'])}"
                f"|meta_sha256={_sha256_hex_text(r['meta_json'])}"
            )
            _digest_update(hasher, line)
        counts["shadow_receipts"] = len(rows)
    except sqlite3.OperationalError:
        counts["shadow_receipts"] = 0

    selection = {
        "scope": scope,
        "since": since,
        "until": until,
        "trace_ids": trace_ids,
        "kinds": [
            "tool_invocations",
            "mcp_calls",
            "proof_artifacts",
            "shadow_receipts",
        ],
    }
    digest = {"algorithm": "sha256", "hex": hasher.hexdigest()}
    bounds = {"min_ts": bounds_min, "max_ts": bounds_max}
    return (
        {"selection": selection, "counts": counts, "bounds": bounds, "digest": digest},
        digest["hex"],
    )
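
# Rough sketch of the canonical record line that feeds the seal digest (field values
# are illustrative, not taken from a real database):
#   tool_invocations|id=42|ts=2025-12-17T10:32:05Z|tool_name=shell|action=run|status=ok|
#   duration_ms=142|trace_id=7f3c...|input_sha256=<hex>|output_sha256=<hex>|error_sha256=<hex>
# Each line is hashed with a trailing newline, in datetime(ts), id order, so the digest
# is deterministic for a given selection window.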


def _schema_version(conn: sqlite3.Connection) -> tuple[int, str | None]:
    """
    Returns (schema_version_int, last_migration_name).

    schema_version_int is derived from the leading 4-digit prefix in applied migration filenames.
    """
    try:
        rows = conn.execute("SELECT name FROM migrations;").fetchall()
    except sqlite3.OperationalError:
        return (0, None)

    max_v = 0
    last_name: str | None = None
    for r in rows:
        name = r["name"] if isinstance(r, sqlite3.Row) else r[0]
        if not isinstance(name, str):
            continue
        if not last_name or name > last_name:
            last_name = name
        prefix = name.split("_", 1)[0]
        if prefix.isdigit():
            max_v = max(max_v, int(prefix))
    return (max_v, last_name)
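
# Assumed example of the version derivation (these migration names are hypothetical):
#   applied migrations: "0001_init", "0002_add_mcp_calls", "0004_add_shadow_receipts"
#   -> schema_version 4, last migration "0004_add_shadow_receipts"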


def cmd_seal(args: argparse.Namespace) -> int:
    db_path = Path(args.db).expanduser().resolve() if args.db else default_db_path()

    with _open(str(db_path)) as conn:
        payload, digest_hex = _seal_query_window(args, conn)
        schema_version, schema_last_migration = _schema_version(conn)

    now = datetime.now(timezone.utc).replace(microsecond=0)
    ts_tag = now.strftime("%Y%m%dT%H%M%SZ")
    seal_dir = db_path.parent / "seals"
    seal_dir.mkdir(parents=True, exist_ok=True)
    bundle_path = (seal_dir / f"ouroboros_seal_{ts_tag}.json").resolve()

    seal_id = str(uuid.uuid4())
    trace_id = new_trace_id()

    bundle = {
        "format": "vm-ouroboros-seal-v0",
        "schema_version": schema_version,
        "schema_last_migration": schema_last_migration,
        "seal_id": seal_id,
        "sealed_at": now.isoformat().replace("+00:00", "Z"),
        "digest_algo": payload["digest"]["algorithm"],
        "selection": payload["selection"],
        "digest": payload["digest"],
        "counts": payload["counts"],
        "bounds": payload["bounds"],
        "inputs": {
            "sqlite_db_path": str(db_path),
        },
        "trace_id": trace_id,
    }
    bundle_path.write_text(
        json.dumps(bundle, ensure_ascii=False, sort_keys=True, indent=2) + "\n",
        encoding="utf-8",
    )

    artifact_id = log_proof_artifact(
        kind="ouroboros_seal_bundle",
        path=bundle_path,
        meta={
            "seal_id": seal_id,
            "schema_version": schema_version,
            "schema_last_migration": schema_last_migration,
            "digest_algo": payload["digest"]["algorithm"],
            "digest": payload["digest"],
            "counts": payload["counts"],
            "bounds": payload["bounds"],
            "selection": payload["selection"],
        },
        trace_id=trace_id,
        db_path=db_path,
    )

    print(f"seal_id={seal_id}")
    print(f"trace_id={trace_id}")
    print(f"digest={digest_hex}")
    print(f"bundle={bundle_path}")
    print(f"artifact_id={artifact_id}")
    return 0
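
# The seal command prints a small summary; an assumed example run might look like:
#   seal_id=<uuid4>
#   trace_id=<new trace id>
#   digest=<sha256 hex of the canonical record lines>
#   bundle=<db directory>/seals/ouroboros_seal_20251217T103205Z.json
#   artifact_id=<id returned by log_proof_artifact>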


def cmd_seals_last(args: argparse.Namespace) -> int:
    with _open(args.db) as conn:
        rows = conn.execute(
            """
            SELECT ts, id, kind, path, sha256_hex, trace_id
            FROM proof_artifacts
            WHERE kind = 'ouroboros_seal_bundle'
            ORDER BY datetime(ts) DESC, id DESC
            LIMIT ?;
            """,
            (int(args.n),),
        ).fetchall()

    for r in rows:
        ts = _fmt_ts(r["ts"])
        print(
            f"{ts} id={r['id']} sha256={r['sha256_hex'] or '-'} trace={r['trace_id'] or '-'} {r['path'] or ''}"
        )
    return 0


def build_parser() -> argparse.ArgumentParser:
    p = argparse.ArgumentParser(
        prog="ledger", description="Local-first SQLite ledger (tool + MCP call log)"
    )
    p.add_argument("--db", help=f"SQLite path (default: {default_db_path()})")

    sub = p.add_subparsers(dest="cmd", required=True)

    sp = sub.add_parser("last", help="Show last N events")
    sp.add_argument("--n", type=int, default=50)
    sp.set_defaults(func=cmd_last)

    sp = sub.add_parser("trace", help="Show an end-to-end trace")
    sp.add_argument("trace_id")
    sp.set_defaults(func=cmd_trace)

    sp = sub.add_parser("tool", help="Filter tool invocations by tool name")
    sp.add_argument("tool_name")
    sp.add_argument("--since", help='e.g. "2025-12-17 00:00:00" or "7 days"')
    sp.add_argument("--n", type=int, default=200)
    sp.set_defaults(func=cmd_tool)

    sp = sub.add_parser("errors", help="Show recent errors")
    sp.add_argument("--since", help='e.g. "7 days"')
    sp.add_argument("--n", type=int, default=200)
    sp.set_defaults(func=cmd_errors)

    sp = sub.add_parser("search", help="Search redacted JSON blobs")
    sp.add_argument("term")
    sp.add_argument("--n", type=int, default=200)
    sp.set_defaults(func=cmd_search)

    sp = sub.add_parser(
        "seal", help="Create an Ouroboros seal bundle (deterministic digest)"
    )
    sp.add_argument("--since", help='e.g. "7 days" or ISO8601')
    sp.add_argument("--until", help='e.g. "2025-12-17 00:00:00" or ISO8601')
    sp.add_argument(
        "--trace-id",
        action="append",
        default=[],
        help="Limit sealing to one or more trace ids (repeatable).",
    )
    sp.set_defaults(func=cmd_seal)

    sp = sub.add_parser("seals", help="Seal bundle utilities")
    seals_sub = sp.add_subparsers(dest="seals_cmd", required=True)

    sp2 = seals_sub.add_parser("last", help="Show last N seal bundles")
    sp2.add_argument("--n", type=int, default=10)
    sp2.set_defaults(func=cmd_seals_last)

    return p
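
# Example invocations built from the subcommands defined above (the "ledger" entry
# point and the tool name "shell" are assumptions; this file only sets prog="ledger"):
#   ledger last --n 20
#   ledger trace <trace_id>
#   ledger tool shell --since "7 days"
#   ledger errors --since "24 hours"
#   ledger search "timeout" --n 50
#   ledger seal --since "7 days" --trace-id <id>
#   ledger seals last --n 5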


def main(argv: list[str] | None = None) -> int:
    parser = build_parser()
    args = parser.parse_args(argv)
    return int(args.func(args))


if __name__ == "__main__":
    raise SystemExit(main())
550  cli/skill_validator.py  Executable file
@@ -0,0 +1,550 @@
#!/usr/bin/env python3
"""
VaultMesh Claude Skill Validator + Receipt Emitter

Checks that the VaultMesh skill is correctly installed under:
    ~/.claude/skills/vaultmesh/

Validates:
    - Directory exists
    - All expected files are present and non-empty
    - SKILL.md has valid YAML frontmatter
    - Supporting docs are properly linked

Emits:
    - VAULTMESH_SKILL_ROOT.txt (BLAKE3 integrity hash; SHA-256 fallback when blake3 is unavailable)
    - A VaultMesh Automation scroll receipt for each run:
        automation_vm_skill_validate_success
        automation_vm_skill_validate_warning
        automation_vm_skill_validate_failure
"""
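
# Typical usage (a sketch; the default skill directory comes from main() below):
#   python cli/skill_validator.py                  # validate ~/.claude/skills/vaultmesh
#   python cli/skill_validator.py /path/to/skill   # validate an explicit directory
# Exit codes: 0 = ok, 1 = warnings only, 2 = at least one failure (or bad usage).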

import json
import os
import re
import sys
from dataclasses import dataclass, asdict
from datetime import datetime, timezone
from pathlib import Path
from typing import List, Optional, Dict, Tuple

# ---------------------------------------------------------------------------
# Path configuration (self-rooting)
# ---------------------------------------------------------------------------

THIS_FILE = Path(__file__).resolve()
CLI_DIR = THIS_FILE.parent  # /root/work/vaultmesh/cli
REPO_ROOT = THIS_FILE.parents[1]  # /root/work/vaultmesh

# Allow override via env var, but default to auto-detected repo root
VM_ROOT = Path(os.environ.get("VAULTMESH_ROOT", REPO_ROOT)).resolve()
RECEIPTS_ROOT = Path(os.environ.get("VAULTMESH_RECEIPTS_ROOT", VM_ROOT / "receipts"))

# ---------------------------------------------------------------------------
# Configuration
# ---------------------------------------------------------------------------

EXPECTED_FILES = [
    "SKILL.md",
    "QUICK_REFERENCE.md",
    "OPERATIONS.md",
    "MCP_INTEGRATION.md",
    "PROTOCOLS.md",
    "ALCHEMICAL_PATTERNS.md",
    "INFRASTRUCTURE.md",
    "CODE_TEMPLATES.md",
    "ENGINE_SPECS.md",
]

SUPPORTING_DOC_LINKS = {
    "QUICK_REFERENCE.md": "Quick Reference",
    "OPERATIONS.md": "Operations Guide",
    "MCP_INTEGRATION.md": "MCP Integration",
    "PROTOCOLS.md": "Protocols",
    "ALCHEMICAL_PATTERNS.md": "Alchemical Patterns",
    "INFRASTRUCTURE.md": "Infrastructure",
    "CODE_TEMPLATES.md": "Code Templates",
    "ENGINE_SPECS.md": "Engine Specs",
}


@dataclass
class CheckResult:
    name: str
    status: str  # "ok", "warn", "fail"
    details: str


@dataclass
class ValidationReport:
    skill_dir: str
    checks: List[CheckResult]
    overall_status: str  # "ok", "warn", "fail"
    hash_algorithm: Optional[str] = None
    root_hash: Optional[str] = None

    def to_dict(self) -> Dict:
        return {
            "skill_dir": self.skill_dir,
            "overall_status": self.overall_status,
            "hash_algorithm": self.hash_algorithm,
            "root_hash": self.root_hash,
            "checks": [asdict(c) for c in self.checks],
        }


# ---------------------------------------------------------------------------
# Hashing helpers
# ---------------------------------------------------------------------------

def load_hasher():
    """Return (name, constructor) for hash function (blake3 preferred)."""
    try:
        import blake3  # type: ignore
        return "blake3", blake3.blake3
    except Exception:
        import hashlib
        return "sha256", hashlib.sha256


# ---------------------------------------------------------------------------
# Basic checks
# ---------------------------------------------------------------------------

def check_dir_exists(skill_dir: Path) -> CheckResult:
    if skill_dir.is_dir():
        return CheckResult(
            name="skill_dir_exists",
            status="ok",
            details=f"Found skill directory at {skill_dir}",
        )
    return CheckResult(
        name="skill_dir_exists",
        status="fail",
        details=f"Skill directory not found: {skill_dir}",
    )


def check_expected_files(skill_dir: Path) -> List[CheckResult]:
    results = []
    for fname in EXPECTED_FILES:
        path = skill_dir / fname
        if not path.exists():
            results.append(
                CheckResult(
                    name=f"file_missing:{fname}",
                    status="fail",
                    details=f"Expected file missing: {fname}",
                )
            )
        elif path.stat().st_size == 0:
            results.append(
                CheckResult(
                    name=f"file_empty:{fname}",
                    status="warn",
                    details=f"File present but empty: {fname}",
                )
            )
        else:
            results.append(
                CheckResult(
                    name=f"file_ok:{fname}",
                    status="ok",
                    details=f"File present: {fname}",
                )
            )
    return results


FRONTMATTER_RE = re.compile(
    r"^---\s*\n(.*?)\n---\s*\n",
    re.DOTALL,
)


def parse_frontmatter(text: str) -> Optional[Dict[str, str]]:
    m = FRONTMATTER_RE.match(text)
    if not m:
        return None
    body = m.group(1)
    data: Dict[str, str] = {}
    for line in body.splitlines():
        line = line.strip()
        if not line or line.startswith("#"):
            continue
        if ":" not in line:
            continue
        key, value = line.split(":", 1)
        data[key.strip()] = value.strip().strip('"').strip("'")
    return data
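
# A minimal SKILL.md frontmatter that satisfies the checks below (a sketch; the
# description text is illustrative, only the name is checked for an exact value):
#   ---
#   name: vaultmesh
#   description: VaultMesh operations skill for Claude
#   ---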


def check_skill_md(skill_dir: Path) -> List[CheckResult]:
    results: List[CheckResult] = []
    path = skill_dir / "SKILL.md"
    if not path.exists():
        return [
            CheckResult(
                name="skill_md_exists",
                status="fail",
                details="SKILL.md is missing",
            )
        ]

    text = path.read_text(encoding="utf-8")

    fm = parse_frontmatter(text)
    if fm is None:
        results.append(
            CheckResult(
                name="skill_md_frontmatter",
                status="fail",
                details="YAML frontmatter block (--- ... ---) not found at top of SKILL.md",
            )
        )
        return results

    # name
    if fm.get("name") == "vaultmesh":
        results.append(
            CheckResult(
                name="skill_md_name",
                status="ok",
                details='Frontmatter name is "vaultmesh".',
            )
        )
    else:
        results.append(
            CheckResult(
                name="skill_md_name",
                status="fail",
                details=f'Frontmatter "name" should be "vaultmesh", got {fm.get("name")!r}',
            )
        )

    # description
    desc = fm.get("description", "").strip()
    if desc:
        results.append(
            CheckResult(
                name="skill_md_description",
                status="ok",
                details=f"Description present ({len(desc)} chars).",
            )
        )
    else:
        results.append(
            CheckResult(
                name="skill_md_description",
                status="fail",
                details="Frontmatter 'description' is missing or empty.",
            )
        )

    # Supporting doc links
    link_checks = check_supporting_links(text)
    results.extend(link_checks)

    return results


def check_supporting_links(skill_md_text: str) -> List[CheckResult]:
    results: List[CheckResult] = []

    # Simple markdown link regex: [Label](FILE.md)
    link_re = re.compile(r"\[([^\]]+)\]\(([^)]+)\)")

    found_links: Dict[str, str] = {}
    for label, target in link_re.findall(skill_md_text):
        found_links[target] = label

    for fname, expected_label in SUPPORTING_DOC_LINKS.items():
        if fname not in found_links:
            results.append(
                CheckResult(
                    name=f"link_missing:{fname}",
                    status="fail",
                    details=f"Missing markdown link to {fname} in SKILL.md",
                )
            )
        else:
            label = found_links[fname]
            # Only warn if label is very different
            if expected_label.lower() not in label.lower():
                results.append(
                    CheckResult(
                        name=f"link_label_warn:{fname}",
                        status="warn",
                        details=(
                            f"Link to {fname} present but label is '{label}', "
                            f"expected something like '{expected_label}'."
                        ),
                    )
                )
            else:
                results.append(
                    CheckResult(
                        name=f"link_ok:{fname}",
                        status="ok",
                        details=f"Link to {fname} present with label '{label}'.",
                    )
                )

    return results


# ---------------------------------------------------------------------------
# Integrity root for the skill (VAULTMESH_SKILL_ROOT.txt)
# ---------------------------------------------------------------------------

def compute_skill_root_hash(skill_dir: Path) -> Tuple[str, str]:
    algo_name, hasher_ctor = load_hasher()
    h = hasher_ctor()

    # Sort to keep deterministic ordering
    for fname in sorted(EXPECTED_FILES):
        path = skill_dir / fname
        if not path.exists():
            continue
        h.update(fname.encode("utf-8"))
        h.update(b"\0")
        with path.open("rb") as f:
            while True:
                chunk = f.read(8192)
                if not chunk:
                    break
                h.update(chunk)

    digest = h.hexdigest()
    return algo_name, digest
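
# Digest preimage sketch (an assumed illustration of the loop above): for each expected
# file, in sorted filename order, the hasher absorbs  <filename bytes> + b"\0" + <file bytes>,
# then moves on to the next file. Missing files are simply skipped, so the root hash
# changes whenever a file appears, disappears, or changes content.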


def write_skill_root_file(skill_dir: Path, algo_name: str, digest: str) -> CheckResult:
    out_path = skill_dir / "VAULTMESH_SKILL_ROOT.txt"
    if algo_name.lower() == "blake3":
        value = f"blake3:{digest}\n"
    else:
        value = f"{algo_name}:{digest}\n"
    out_path.write_text(value, encoding="utf-8")
    return CheckResult(
        name="root_file_written",
        status="ok",
        details=f"Wrote integrity root to {out_path} using {algo_name}.",
    )


# ---------------------------------------------------------------------------
# Automation scroll receipt emission
# ---------------------------------------------------------------------------

def _now_iso() -> str:
    return datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")


def _read_last_receipt(receipts_path: Path) -> Optional[Dict]:
    """Read the last receipt from the JSONL scroll file."""
    if not receipts_path.exists():
        return None
    try:
        with receipts_path.open("r", encoding="utf-8") as f:
            last_line = None
            for line in f:
                line = line.strip()
                if line:
                    last_line = line
            if not last_line:
                return None
            return json.loads(last_line)
    except Exception:
        return None


def emit_automation_receipt(
    event_type: str,
    report: ValidationReport,
    skill_root_algo: str,
    skill_root_hash: str,
) -> None:
    """
    Emit a VaultMesh Automation scroll receipt for this validator run.

    Scroll: Automation
    Types:
        - automation_vm_skill_validate_success
        - automation_vm_skill_validate_warning
        - automation_vm_skill_validate_failure
    """

    scroll_name = "Automation"
    scroll_dir = RECEIPTS_ROOT / "automation"
    scroll_dir.mkdir(parents=True, exist_ok=True)
    receipts_path = scroll_dir / "automation_events.jsonl"

    # Determine sequence and previous_hash
    last = _read_last_receipt(receipts_path)
    if last is None:
        sequence = 0
        previous_hash = None
    else:
        sequence = int(last.get("meta", {}).get("sequence", -1)) + 1
        previous_hash = last.get("header", {}).get("root_hash")

    # Body snapshot (we keep it compact)
    body = {
        "skill_dir": report.skill_dir,
        "hash_algorithm": skill_root_algo,
        "root_hash": f"blake3:{skill_root_hash}"
        if skill_root_algo.lower() == "blake3"
        else f"{skill_root_algo}:{skill_root_hash}",
        "overall_status": report.overall_status,
        "checks": [
            {
                "name": c.name,
                "status": c.status,
            }
            for c in report.checks
        ],
    }

    # Build receipt (schema v2-style)
    timestamp = _now_iso()
    receipt = {
        "schema_version": "2.0.0",
        "type": event_type,
        "timestamp": timestamp,
        "header": {
            "root_hash": None,  # filled after hash
            "tags": [
                "vaultmesh_skill",
                "validator",
                f"status:{report.overall_status}",
            ],
            "previous_hash": previous_hash,
        },
        "meta": {
            "scroll": scroll_name,
            "sequence": sequence,
            "anchor_epoch": None,
            "proof_path": None,
        },
        "body": body,
    }

    # Compute receipt hash over canonical JSON
    algo_name, hasher_ctor = load_hasher()
    h = hasher_ctor()
    encoded = json.dumps(receipt, sort_keys=True, separators=(",", ":")).encode("utf-8")
    h.update(encoded)
    digest = h.hexdigest()
    if algo_name.lower() == "blake3":
        receipt_hash = f"blake3:{digest}"
    else:
        receipt_hash = f"{algo_name}:{digest}"

    receipt["header"]["root_hash"] = receipt_hash

    # Append to scroll file
    with receipts_path.open("a", encoding="utf-8") as f:
        f.write(json.dumps(receipt, separators=(",", ":")) + "\n")
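
# Receipts accumulate in a JSONL "scroll"; each entry's header.previous_hash points at
# the prior entry's header.root_hash, forming a simple hash chain. An abridged sketch
# of one line (values illustrative, not real output):
#   {"schema_version":"2.0.0","type":"automation_vm_skill_validate_success",
#    "header":{"root_hash":"blake3:...","previous_hash":"blake3:...","tags":[...]},
#    "meta":{"scroll":"Automation","sequence":3,...},"body":{...}}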


# ---------------------------------------------------------------------------
# Aggregation + main
# ---------------------------------------------------------------------------

def aggregate_status(checks: List[CheckResult]) -> str:
    worst = "ok"
    for c in checks:
        if c.status == "fail":
            return "fail"
        if c.status == "warn" and worst == "ok":
            worst = "warn"
    return worst


def main(argv: List[str]) -> int:
    if len(argv) > 2:
        print(f"Usage: {argv[0]} [skill_dir]", file=sys.stderr)
        return 2

    if len(argv) == 2:
        skill_dir = Path(argv[1]).expanduser()
    else:
        skill_dir = Path("~/.claude/skills/vaultmesh").expanduser()

    checks: List[CheckResult] = []

    # 1. Directory
    dir_check = check_dir_exists(skill_dir)
    checks.append(dir_check)
    if dir_check.status == "fail":
        report = ValidationReport(
            skill_dir=str(skill_dir),
            checks=checks,
            overall_status="fail",
        )
        print(json.dumps(report.to_dict(), indent=2))
        # No receipt emitted if the skill dir doesn't exist
        return 2

    # 2. Files
    checks.extend(check_expected_files(skill_dir))

    # 3. SKILL.md + links
    checks.extend(check_skill_md(skill_dir))

    # 4. Skill integrity root
    skill_algo, skill_digest = compute_skill_root_hash(skill_dir)
    checks.append(
        CheckResult(
            name="hash_algorithm",
            status="ok" if skill_algo == "blake3" else "warn",
            details=(
                f"Using {skill_algo} for integrity hash "
                + ("(preferred)." if skill_algo == "blake3" else "(BLAKE3 not available, using fallback.)")
            ),
        )
    )
    checks.append(write_skill_root_file(skill_dir, skill_algo, skill_digest))

    overall = aggregate_status(checks)

    report = ValidationReport(
        skill_dir=str(skill_dir),
        checks=checks,
        overall_status=overall,
        hash_algorithm=skill_algo,
        root_hash=skill_digest,
    )

    # Print JSON report to stdout
    print(json.dumps(report.to_dict(), indent=2))

    # Emit Automation scroll receipt
    if overall == "ok":
        event_type = "automation_vm_skill_validate_success"
    elif overall == "warn":
        event_type = "automation_vm_skill_validate_warning"
    else:
        event_type = "automation_vm_skill_validate_failure"

    try:
        emit_automation_receipt(
            event_type=event_type,
            report=report,
            skill_root_algo=skill_algo,
            skill_root_hash=skill_digest,
        )
    except Exception as e:
        # We don't want receipt emission failures to hide validation output,
        # so just log to stderr and keep the original exit code.
        print(f"WARNING: failed to emit automation receipt: {e}", file=sys.stderr)

    if overall == "ok":
        return 0
    if overall == "warn":
        return 1
    return 2


if __name__ == "__main__":
    raise SystemExit(main(sys.argv))
2816  cli/vm_cli.py  Executable file
File diff suppressed because it is too large