Initialize repository snapshot

.opencode/package.json (Normal file, 5 lines)
@@ -0,0 +1,5 @@
{
  "dependencies": {
    "@opencode-ai/plugin": "1.0.166"
  }
}

.opencode/plugin/vaultmesh-sentinel.ts (Normal file, 300 lines)
@@ -0,0 +1,300 @@
import { tool, type Plugin } from "@opencode-ai/plugin";
import { spawn } from "node:child_process";
import { createHash, randomUUID } from "node:crypto";
import { promises as fs, statSync } from "node:fs";
import os from "node:os";
import path from "node:path";

const TOOL_VERSION = "0.2.0";

type RunResult = {
  exitCode: number;
  stdout: string;
  stderr: string;
};

function normalizeForStableJson(value: unknown): unknown {
  if (value === null || value === undefined) return value;
  if (Array.isArray(value)) return value.map((v) => normalizeForStableJson(v));
  if (typeof value === "object") {
    const entries = Object.entries(value as Record<string, unknown>).sort(
      ([a], [b]) => a.localeCompare(b),
    );
    return entries.reduce<Record<string, unknown>>((acc, [k, v]) => {
      acc[k] = normalizeForStableJson(v);
      return acc;
    }, {});
  }
  return value;
}

function stableStringify(value: unknown): string {
  return JSON.stringify(normalizeForStableJson(value));
}

function run(
  cmd: string,
  args: string[],
  opts: { env?: Record<string, string | undefined> } = {},
): Promise<RunResult> {
  return new Promise((resolve) => {
    const child = spawn(cmd, args, {
      env: { ...process.env, ...(opts.env ?? {}) },
      stdio: ["ignore", "pipe", "pipe"],
    });

    let stdout = "";
    let stderr = "";

    child.stdout.on("data", (d) => {
      stdout += d.toString("utf8");
    });

    child.stderr.on("data", (d) => {
      stderr += d.toString("utf8");
    });

    child.on("close", (code) => {
      resolve({ exitCode: code ?? 1, stdout, stderr });
    });
  });
}

async function computeBundleHash(bundlePath: string): Promise<string> {
  const root = path.resolve(bundlePath);
  const hasher = createHash("sha256");

  async function walk(dir: string): Promise<void> {
    const entries = await fs.readdir(dir, { withFileTypes: true });
    const sorted = entries.sort((a, b) => a.name.localeCompare(b.name));

    for (const entry of sorted) {
      const abs = path.join(dir, entry.name);
      const rel = path.relative(root, abs).split(path.sep).join("/");

      if (entry.isDirectory()) {
        hasher.update(`dir:${rel}\n`);
        await walk(abs);
        continue;
      }

      if (entry.isFile()) {
        const data = await fs.readFile(abs);
        const digest = createHash("sha256").update(data).digest("hex");
        hasher.update(`file:${rel}:${data.length}:${digest}\n`);
      }
    }
  }

  await walk(root);
  return `sha256:${hasher.digest("hex")}`;
}

async function readJsonIfExists(filePath: string): Promise<unknown | null> {
  try {
    const raw = await fs.readFile(filePath, "utf8");
    return JSON.parse(raw);
  } catch {
    return null;
  }
}

function resolveWorkspaceRoot(
  worktree: string | undefined,
  directory: string,
): string {
  const envRoot = process.env.VAULTMESH_WORKSPACE_ROOT;
  if (envRoot) return envRoot;

  if (worktree) return path.resolve(worktree, "..");
  return path.resolve(directory, "..");
}

function resolveVerifierPath(
  worktree: string | undefined,
  directory: string,
): string {
  const envVerifier = process.env.VAULTMESH_SENTINEL_VERIFIER;
  if (envVerifier) return envVerifier;

  const workspaceRoot = resolveWorkspaceRoot(worktree, directory);
  const candidates = [
    path.join(
      workspaceRoot,
      "vaultmesh-orgine-mobile/tools/vm_verify_sentinel_bundle.py",
    ),
    path.join(workspaceRoot, "tools/vm_verify_sentinel_bundle.py"),
  ];

  for (const candidate of candidates) {
    try {
      if (statSync(candidate).isFile()) {
        return candidate;
      }
    } catch {
      // continue searching
    }
  }

  // Fallback: first candidate, even if it does not exist (caller will error deterministically)
  return candidates[0];
}

export const VaultMeshSentinelPlugin: Plugin = async (ctx) => {
  const baseDir = ctx.directory ?? process.cwd();
  const verifierPath = resolveVerifierPath(ctx.worktree, baseDir);

  return {
    tool: {
      sentinelVerifyBundle: tool({
        description:
          "Verify a VaultMesh Sentinel v1 seal bundle offline (deterministic; no network).",
        args: {
          bundlePath: tool.schema
            .string()
            .describe("Path to seal bundle directory (contains seal.json)"),
          strict: tool.schema
            .boolean()
            .optional()
            .default(false)
            .describe("Enable strict verification (recommended for audits)"),
          maxFileBytes: tool.schema
            .number()
            .int()
            .optional()
            .describe(
              "Reject any single input file larger than this many bytes",
            ),
        },
        async execute(args) {
          const respond = (value: unknown) => stableStringify(value);

          const bundle = path.resolve(baseDir, args.bundlePath);

          const baseResult = {
            tool: "sentinelVerifyBundle",
            tool_version: TOOL_VERSION,
            verifier_path: verifierPath,
            bundle_path: bundle,
            bundle_hash: null as string | null,
            canonicalization_version: null as string | null,
            schema_version: null as string | null,
            verifier_version: null as string | null,
            strict: !!args.strict,
            exit_code: null as number | null,
            ok: false,
            stdout: "",
            stderr: "",
            report: null as unknown,
          };

          try {
            const st = await fs.stat(bundle);
            if (!st.isDirectory()) {
              return respond({
                ...baseResult,
                error: "BUNDLE_NOT_DIRECTORY",
              });
            }
          } catch {
            return respond({
              ...baseResult,
              error: "BUNDLE_NOT_FOUND",
            });
          }

          try {
            const st = await fs.stat(verifierPath);
            if (!st.isFile()) {
              return respond({
                ...baseResult,
                error: "VERIFIER_NOT_FILE",
              });
            }
          } catch {
            return respond({
              ...baseResult,
              error: "VERIFIER_NOT_FOUND",
            });
          }

          baseResult.bundle_hash = await computeBundleHash(bundle);

          const reportPath = path.join(
            os.tmpdir(),
            `vm_sentinel_verification_report_${randomUUID()}.json`,
          );

          const cmdArgs: string[] = [
            "-u",
            verifierPath,
            "--bundle",
            bundle,
            "--report",
            reportPath,
          ];
          if (args.strict) cmdArgs.push("--strict");
          if (typeof args.maxFileBytes === "number") {
            cmdArgs.push("--max-file-bytes", String(args.maxFileBytes));
          }

          const { exitCode, stdout, stderr } = await run("python3", cmdArgs);
          const report = await readJsonIfExists(reportPath);

          // Best-effort cleanup: report is returned inline; avoid mutating evidence bundles.
          await fs.unlink(reportPath).catch(() => {});

          const versions =
            report && typeof report === "object"
              ? (report as Record<string, unknown>).versions
              : null;

          const canonicalizationVersion =
            versions &&
            typeof versions === "object" &&
            (versions as Record<string, unknown>).canonicalization_version;
          const schemaVersion =
            versions &&
            typeof versions === "object" &&
            (versions as Record<string, unknown>).schema_version;

          const declared =
            report && typeof report === "object"
              ? (report as Record<string, unknown>).declared_verifier
              : null;
          const reportedVerifier =
            report && typeof report === "object"
              ? (report as Record<string, unknown>).verifier
              : null;

          const verifierVersion =
            (declared &&
              typeof declared === "object" &&
              (declared as Record<string, unknown>).version) ||
            (reportedVerifier &&
              typeof reportedVerifier === "object" &&
              (reportedVerifier as Record<string, unknown>).version) ||
            null;

          return respond({
            ...baseResult,
            exit_code: exitCode,
            ok: exitCode === 0,
            stdout,
            stderr,
            report,
            canonicalization_version:
              typeof canonicalizationVersion === "string"
                ? canonicalizationVersion
                : null,
            schema_version:
              typeof schemaVersion === "string" ? schemaVersion : null,
            verifier_version:
              typeof verifierVersion === "string" ? verifierVersion : null,
            error: undefined,
          });
        },
      }),
    },
  };
};
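
Note on the response format: every result from sentinelVerifyBundle passes through stableStringify, which recursively sorts object keys before JSON.stringify, so the emitted string does not depend on property insertion order. A minimal illustrative sketch (the input values below are made up, not part of the plugin):

    // Both calls produce the identical string: {"a":1,"b":{"c":2,"d":3}}
    stableStringify({ b: { d: 3, c: 2 }, a: 1 });
    stableStringify({ a: 1, b: { c: 2, d: 3 } });

Verification itself is not reimplemented in TypeScript: execute() spawns python3 -u <verifierPath> --bundle <bundlePath> --report <tmpReport> (adding --strict and --max-file-bytes when supplied), reads the report JSON back, embeds it in the stable-JSON response, and deletes the temp report best-effort.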