Initial commit - combined iTerm2 scripts
Contains:
- 1m-brag
- tem
- VaultMesh_Catalog_v1
- VAULTMESH-ETERNAL-PATTERN

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
@@ -0,0 +1,424 @@
#!/usr/bin/env python3
"""
PQC Integration Budget & Person-Month Checker

Purpose:
    Validates consortium budget and person-month allocations from consortium-tracker.csv
    against PQC Integration proposal constraints:
    - Total budget: €2,800,000 (€2.8M)
    - Total person-months: 104 PM baseline (112 PM with 10% buffer)
    - Budget distribution: VaultMesh 70.4%, Brno 10%, Cyber Trust 12.5%, France 7.1%

Usage:
    python3 budget_checker.py

Expected CSV structure (from consortium-tracker.csv):
    Partner Name, Country, Type, Budget (€), Person-Months, LOI Status, ...

Author: VaultMesh Technologies B.V.
Version: 1.0
Date: 2025-11-06
"""

import csv
import sys
from pathlib import Path
from typing import Dict, List, Tuple
from dataclasses import dataclass
from enum import Enum


class CheckStatus(Enum):
    """Status codes for validation checks."""
    PASS = "✓ PASS"
    WARN = "⚠ WARN"
    FAIL = "✗ FAIL"


@dataclass
class PartnerAllocation:
    """Partner budget and person-month allocation."""
    name: str
    country: str
    partner_type: str
    budget_eur: int
    person_months: float
    loi_status: str
    budget_pct: float = 0.0
    pm_fte_avg: float = 0.0


@dataclass
class ValidationResult:
    """Result of a validation check."""
    check_name: str
    status: CheckStatus
    expected: str
    actual: str
    details: str = ""


class BudgetChecker:
    """Validates PQC Integration budget and person-month allocations."""

    # Proposal constraints
    TOTAL_BUDGET_EUR = 2_800_000  # €2.8M total
    BASELINE_PM = 104             # Baseline person-months
    BUFFERED_PM = 112             # With 10% buffer
    PROJECT_MONTHS = 24           # 24-month duration

    # Expected budget distribution (from PQC_Submission_Checklist.md)
    EXPECTED_BUDGET_PCT = {
        "VaultMesh Technologies B.V.": 70.4,
        "Masaryk University": 10.0,
        "Cyber Trust S.A.": 12.5,
        "Public Digital Services Agency": 7.1,
    }
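    # Sanity note: the expected shares sum to 100.0% (70.4 + 10.0 + 12.5 + 7.1).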

    # Tolerances
    BUDGET_TOLERANCE_PCT = 2.0  # ±2% tolerance for budget distribution
    PM_TOLERANCE_PCT = 5.0      # ±5% tolerance for person-months

    def __init__(self, csv_path: Path):
        """Initialize checker with path to consortium tracker CSV."""
        self.csv_path = csv_path
        self.partners: List[PartnerAllocation] = []
        self.results: List[ValidationResult] = []

    def load_csv(self) -> bool:
        """Load partner data from CSV file."""
        if not self.csv_path.exists():
            print(f"✗ ERROR: CSV file not found: {self.csv_path}")
            return False

        try:
            with open(self.csv_path, 'r', encoding='utf-8') as f:
                reader = csv.DictReader(f)
                for row in reader:
                    # Only process rows for PQC Integration proposal
                    # CSV uses "Proposal Track" column
                    if 'PQC' not in row.get('Proposal Track', ''):
                        continue

                    # Parse budget (remove € symbol and commas)
                    budget_str = row.get('Budget (€)', '0').replace('€', '').replace(',', '').strip()
                    try:
                        budget = int(budget_str) if budget_str else 0
                    except ValueError:
                        print(f"⚠ WARNING: Invalid budget for {row.get('Partner Name')}: {budget_str}")
                        budget = 0

                    # Parse person-months
                    pm_str = row.get('Person-Months', '0').strip()
                    try:
                        pm = float(pm_str) if pm_str else 0.0
                    except ValueError:
                        print(f"⚠ WARNING: Invalid person-months for {row.get('Partner Name')}: {pm_str}")
                        pm = 0.0

                    partner = PartnerAllocation(
                        name=row.get('Partner Name', 'Unknown').strip(),
                        country=row.get('Country', 'Unknown').strip(),
                        partner_type=row.get('Type', 'Unknown').strip(),
                        budget_eur=budget,
                        person_months=pm,
                        loi_status=row.get('LOI Status', 'Unknown').strip(),
                    )
                    self.partners.append(partner)

            if not self.partners:
                print("✗ ERROR: No PQC Integration partners found in CSV")
                return False

            print(f"✓ Loaded {len(self.partners)} partners from {self.csv_path.name}\n")
            return True

        except Exception as e:
            print(f"✗ ERROR loading CSV: {e}")
            return False

    def calculate_totals(self) -> Tuple[int, float]:
        """Calculate total budget and person-months."""
        total_budget = sum(p.budget_eur for p in self.partners)
        total_pm = sum(p.person_months for p in self.partners)

        # Calculate percentages and FTE averages
        for partner in self.partners:
            partner.budget_pct = (partner.budget_eur / total_budget * 100) if total_budget > 0 else 0.0
            partner.pm_fte_avg = partner.person_months / self.PROJECT_MONTHS

        return total_budget, total_pm

    def check_total_budget(self, actual_budget: int) -> ValidationResult:
        """Validate total budget against proposal constraint."""
        expected = f"€{self.TOTAL_BUDGET_EUR:,}"
        actual = f"€{actual_budget:,}"

        if actual_budget == self.TOTAL_BUDGET_EUR:
            status = CheckStatus.PASS
            details = "Budget matches proposal exactly"
        elif abs(actual_budget - self.TOTAL_BUDGET_EUR) / self.TOTAL_BUDGET_EUR * 100 < self.BUDGET_TOLERANCE_PCT:
            status = CheckStatus.WARN
            variance_pct = (actual_budget - self.TOTAL_BUDGET_EUR) / self.TOTAL_BUDGET_EUR * 100
            details = f"Budget variance: {variance_pct:+.1f}% (within tolerance)"
        else:
            status = CheckStatus.FAIL
            variance = actual_budget - self.TOTAL_BUDGET_EUR
            details = f"Budget off by €{variance:,} ({variance / self.TOTAL_BUDGET_EUR * 100:+.1f}%)"

        return ValidationResult(
            check_name="Total Budget",
            status=status,
            expected=expected,
            actual=actual,
            details=details
        )

    def check_total_person_months(self, actual_pm: float) -> ValidationResult:
        """Validate total person-months against baseline/buffered targets."""
        expected = f"{self.BASELINE_PM} PM (baseline) / {self.BUFFERED_PM} PM (buffered)"
        actual = f"{actual_pm:.1f} PM"

        if self.BASELINE_PM <= actual_pm <= self.BUFFERED_PM:
            status = CheckStatus.PASS
            details = f"Within baseline-buffered range ({actual_pm / self.PROJECT_MONTHS:.1f} FTE avg)"
        elif actual_pm < self.BASELINE_PM:
            status = CheckStatus.WARN
            shortage = self.BASELINE_PM - actual_pm
            details = f"Below baseline by {shortage:.1f} PM (may underdeliver)"
        else:
            status = CheckStatus.FAIL
            excess = actual_pm - self.BUFFERED_PM
            details = f"Exceeds buffer by {excess:.1f} PM (over budget risk)"

        return ValidationResult(
            check_name="Total Person-Months",
            status=status,
            expected=expected,
            actual=actual,
            details=details
        )

    def check_budget_distribution(self) -> List[ValidationResult]:
        """Validate per-partner budget percentages against expected distribution."""
        results = []

        for partner in self.partners:
            # Find expected percentage (match by partner name prefix)
            expected_pct = None
            for expected_name, pct in self.EXPECTED_BUDGET_PCT.items():
                if expected_name in partner.name or partner.name in expected_name:
                    expected_pct = pct
                    break

            if expected_pct is None:
                results.append(ValidationResult(
                    check_name=f"Budget % ({partner.name})",
                    status=CheckStatus.WARN,
                    expected="N/A",
                    actual=f"{partner.budget_pct:.1f}%",
                    details="Partner not in expected distribution list"
                ))
                continue

            # Check if actual matches expected within tolerance
            variance = abs(partner.budget_pct - expected_pct)

            if variance < self.BUDGET_TOLERANCE_PCT:
                status = CheckStatus.PASS
                details = f"Matches expected ({variance:.1f}% variance)"
            elif variance < self.BUDGET_TOLERANCE_PCT * 2:
                status = CheckStatus.WARN
                details = f"Slightly off ({variance:.1f}% variance, {partner.budget_pct - expected_pct:+.1f}%)"
            else:
                status = CheckStatus.FAIL
                details = f"Significant deviation ({variance:.1f}% variance, {partner.budget_pct - expected_pct:+.1f}%)"

            results.append(ValidationResult(
                check_name=f"Budget % ({partner.name})",
                status=status,
                expected=f"{expected_pct:.1f}%",
                actual=f"{partner.budget_pct:.1f}%",
                details=details
            ))

        return results

    def check_loi_status(self) -> List[ValidationResult]:
        """Validate LOI status for all partners."""
        results = []

        for partner in self.partners:
            expected = "Confirmed/Signed/Sent/Coordinator"
            actual = partner.loi_status

            if actual.lower() in ['confirmed', 'signed', 'sent', 'coordinator']:
                status = CheckStatus.PASS
                details = "LOI confirmed" if actual.lower() != 'coordinator' else "Coordinator (no LOI needed)"
            elif actual.lower() in ['draft', 'pending']:
                status = CheckStatus.WARN
                details = "LOI not yet confirmed (follow-up needed)"
            else:
                status = CheckStatus.FAIL
                details = f"LOI status unclear: {actual}"

            results.append(ValidationResult(
                check_name=f"LOI Status ({partner.name})",
                status=status,
                expected=expected,
                actual=actual,
                details=details
            ))

        return results

    def run_all_checks(self) -> Tuple[bool, bool, bool]:
        """Run all validation checks and store results."""
        print("=" * 80)
        print("PQC INTEGRATION BUDGET & PERSON-MONTH VALIDATION")
        print("=" * 80)
        print()

        # Calculate totals
        total_budget, total_pm = self.calculate_totals()

        # Run checks
        self.results.append(self.check_total_budget(total_budget))
        self.results.append(self.check_total_person_months(total_pm))
        self.results.extend(self.check_budget_distribution())
        self.results.extend(self.check_loi_status())

        # Check if all passed
        all_passed = all(r.status == CheckStatus.PASS for r in self.results)
        has_warnings = any(r.status == CheckStatus.WARN for r in self.results)
        has_failures = any(r.status == CheckStatus.FAIL for r in self.results)

        return all_passed, has_warnings, has_failures

    def print_partner_breakdown(self):
        """Print detailed partner breakdown table."""
        print("\n" + "=" * 80)
        print("PARTNER BREAKDOWN")
        print("=" * 80)
        print()
        print(f"{'Partner':<35} {'Country':<8} {'Budget':<15} {'%':<8} {'PM':<8} {'FTE':<6}")
        print("-" * 80)

        for partner in self.partners:
            budget_str = f"€{partner.budget_eur:,}"
            pct_str = f"{partner.budget_pct:.1f}%"
            pm_str = f"{partner.person_months:.1f}"
            fte_str = f"{partner.pm_fte_avg:.2f}"

            print(f"{partner.name:<35} {partner.country:<8} {budget_str:<15} {pct_str:<8} {pm_str:<8} {fte_str:<6}")

        # Print totals
        total_budget, total_pm = self.calculate_totals()
        total_fte = total_pm / self.PROJECT_MONTHS
        total_budget_str = f"€{total_budget:,}"
        total_pm_str = f"{total_pm:.1f}"
        total_fte_str = f"{total_fte:.2f}"
        print("-" * 80)
        print(f"{'TOTAL':<35} {'':<8} {total_budget_str:<15} {'100.0%':<8} {total_pm_str:<8} {total_fte_str:<6}")
        print()

    def print_validation_results(self):
        """Print validation results in formatted table."""
        print("\n" + "=" * 80)
        print("VALIDATION RESULTS")
        print("=" * 80)
        print()
        print(f"{'Check':<40} {'Status':<10} {'Expected':<20} {'Actual':<20}")
        print("-" * 80)

        for result in self.results:
            status_symbol = result.status.value
            print(f"{result.check_name:<40} {status_symbol:<10} {result.expected:<20} {result.actual:<20}")
            if result.details:
                print(f"  → {result.details}")

        print()

    def print_summary(self, all_passed: bool, has_warnings: bool, has_failures: bool):
        """Print final summary with recommendations."""
        print("=" * 80)
        print("SUMMARY")
        print("=" * 80)
        print()

        total_checks = len(self.results)
        passed = sum(1 for r in self.results if r.status == CheckStatus.PASS)
        warned = sum(1 for r in self.results if r.status == CheckStatus.WARN)
        failed = sum(1 for r in self.results if r.status == CheckStatus.FAIL)

        print(f"Total Checks: {total_checks}")
        print(f"✓ Passed: {passed}")
        print(f"⚠ Warnings: {warned}")
        print(f"✗ Failed: {failed}")
        print()

        if all_passed:
            print("🎉 ALL CHECKS PASSED — Budget ready for submission!")
            print()
            print("Next steps:")
            print("  1. Verify all partner PICs are registered on EU Funding & Tenders Portal")
            print("  2. Ensure consortium agreement includes these budget allocations")
            print("  3. Cross-check with Part B Section 3.1 (Work Plan & Resources)")
            print("  4. Run this checker again if any changes are made to consortium-tracker.csv")
            return True
        elif has_failures:
            print("❌ CRITICAL ISSUES DETECTED — Budget requires fixes before submission!")
            print()
            print("Action required:")
            print("  1. Review failed checks above")
            print("  2. Update consortium-tracker.csv with corrected values")
            print("  3. Re-run budget_checker.py to verify fixes")
            print("  4. Notify steering committee if budget reallocation needed (requires 75% vote)")
            return False
        elif has_warnings:
            print("⚠️ WARNINGS DETECTED — Budget mostly ready, minor issues to address")
            print()
            print("Recommended actions:")
            print("  1. Review warnings above (may be acceptable variances)")
            print("  2. Confirm with steering committee if warnings are acceptable")
            print("  3. Document any intentional deviations in consortium agreement")
            print("  4. Re-run checker after any corrections")
            return True

        return False


def main():
    """Main entry point."""
    # Determine path to consortium-tracker.csv (relative to this script)
    script_dir = Path(__file__).parent
    csv_path = script_dir.parent / "consortium" / "consortium-tracker.csv"

    print("PQC Integration Budget Checker v1.0")
    print(f"Checking: {csv_path}")
    print()

    checker = BudgetChecker(csv_path)

    # Load CSV
    if not checker.load_csv():
        sys.exit(1)

    # Print partner breakdown
    checker.print_partner_breakdown()

    # Run validation checks
    all_passed, has_warnings, has_failures = checker.run_all_checks()

    # Print results
    checker.print_validation_results()
    checker.print_summary(all_passed, has_warnings, has_failures)

    # Exit with appropriate code
    if has_failures:
        sys.exit(2)  # Critical failures
    elif has_warnings:
        sys.exit(1)  # Warnings only
    else:
        sys.exit(0)  # All passed


if __name__ == "__main__":
    main()
@@ -0,0 +1,499 @@
#!/usr/bin/env python3
"""
VaultMesh Funding Roadmap — Genesis Receipt Generator
Rubedo Seal II: Treasury Nebula Activation

Generates cryptographic genesis receipt for complete funding roadmap:
- Computes SHA-256 hash of all roadmap files
- Builds Merkle tree from file hashes
- Creates genesis receipt with Rubedo seal
- Produces human-readable proof chain document
- Emits receipt to VaultMesh permanent ledger

Usage:
    python3 generate_genesis_receipt.py [--dry-run]
"""

import json
import hashlib
import datetime
from pathlib import Path
from typing import List, Dict, Tuple
import sys


class MerkleTree:
    """Simple Merkle tree implementation for funding roadmap files."""

    @staticmethod
    def hash_data(data: str) -> str:
        """SHA-256 hash of data."""
        return hashlib.sha256(data.encode('utf-8')).hexdigest()

    @staticmethod
    def hash_pair(left: str, right: str) -> str:
        """Hash two nodes together."""
        return hashlib.sha256((left + right).encode('utf-8')).hexdigest()

    @classmethod
    def build_tree(cls, leaf_hashes: List[str]) -> Tuple[str, List[List[str]]]:
        """
        Build Merkle tree from leaf hashes.
        Returns: (root_hash, tree_levels)
        """
        if not leaf_hashes:
            return cls.hash_data(""), [[]]

        # If odd number of leaves, duplicate last one
        if len(leaf_hashes) % 2 == 1:
            leaf_hashes = leaf_hashes + [leaf_hashes[-1]]

        tree_levels = [leaf_hashes]
        current_level = leaf_hashes

        while len(current_level) > 1:
            next_level = []
            for i in range(0, len(current_level), 2):
                left = current_level[i]
                right = current_level[i + 1] if i + 1 < len(current_level) else current_level[i]
                parent = cls.hash_pair(left, right)
                next_level.append(parent)
            tree_levels.append(next_level)
            current_level = next_level

        return current_level[0], tree_levels
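
# Illustrative usage (placeholder leaves, not real file digests): with three
# leaves the last one is duplicated, so level 0 has 4 nodes, level 1 has 2,
# and the final level holds the single root.
#
#   leaves = [MerkleTree.hash_data(name) for name in ("a.md", "b.md", "c.md")]
#   root, levels = MerkleTree.build_tree(leaves)
#   assert levels[-1][0] == root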


class FundingRoadmapGenesis:
    """Genesis receipt generator for VaultMesh Funding Roadmap."""

    def __init__(self, roadmap_dir: Path):
        self.roadmap_dir = roadmap_dir
        self.timestamp = datetime.datetime.now(datetime.timezone.utc)
        self.files_data = []

    def scan_files(self) -> List[Dict]:
        """Scan all roadmap files and compute hashes."""
        print(f"📂 Scanning {self.roadmap_dir}")

        # Include all markdown, CSV, and Mermaid files
        patterns = ['**/*.md', '**/*.csv', '**/*.mmd']
        all_files = []

        for pattern in patterns:
            all_files.extend(self.roadmap_dir.glob(pattern))

        # Sort for deterministic ordering
        all_files = sorted(all_files)

        for file_path in all_files:
            try:
                content = file_path.read_text(encoding='utf-8')
                file_hash = hashlib.sha256(content.encode('utf-8')).hexdigest()

                self.files_data.append({
                    'path': str(file_path.relative_to(self.roadmap_dir)),
                    'hash': file_hash,
                    'size': len(content),
                    'lines': content.count('\n') + 1
                })

                print(f"  ✓ {file_path.name:50s} {file_hash[:16]}... ({len(content):6d} bytes)")

            except Exception as e:
                print(f"  ✗ {file_path.name}: {e}")

        return self.files_data

    def build_merkle_tree(self) -> Tuple[str, List[List[str]]]:
        """Build Merkle tree from file hashes."""
        print(f"\n🌳 Building Merkle tree from {len(self.files_data)} files")

        leaf_hashes = [f['hash'] for f in self.files_data]
        root_hash, tree_levels = MerkleTree.build_tree(leaf_hashes)

        print(f"  → Tree depth: {len(tree_levels)} levels")
        print(f"  → Root hash: {root_hash}")

        return root_hash, tree_levels

    def generate_genesis_receipt(self, merkle_root: str) -> Dict:
        """Generate genesis receipt for funding roadmap."""
        print("\n🜂 Generating Genesis Receipt (Rubedo Seal)")

        # Calculate aggregate statistics
        total_lines = sum(f['lines'] for f in self.files_data)
        total_bytes = sum(f['size'] for f in self.files_data)

        receipt = {
            "kind": "funding.roadmap.genesis",
            "milestone": "Treasury Nebula Activation",
            "phase": "Rubedo",
            "seal": "II",
            "ts": self.timestamp.isoformat(),
            "coordinator": "VaultMesh Technologies B.V.",
            "guardian": "guardian@vaultmesh.org",

            "manifest": {
                "files_count": len(self.files_data),
                "total_lines": total_lines,
                "total_bytes": total_bytes,
                "merkle_root": merkle_root
            },

            "funding_axis": {
                "proposals": 8,
                "total_budget_eur": "15.8M+",
                "partners": "20+",
                "countries": "10+",
                "work_packages": "25+",
                "pilots": "12+",
                "diagrams": 4,
                "timeline": "2025-2027"
            },

            "deliverables": {
                "loi_template": True,
                "onboarding_kit": True,
                "consortium_tracker": True,
                "architecture_diagrams": 4,
                "meta_visualization": "treasury-nebula-map.mmd"
            },

            "tier_1_proposals": [
                {
                    "name": "PQC Integration",
                    "budget_eur": "2.8M",
                    "call": "HORIZON-CL3-2025-CS-ECCC-06",
                    "deadline": "2025-12-15",
                    "partners": 4
                },
                {
                    "name": "Digital Twins",
                    "budget_eur": "10M",
                    "call": "HORIZON-CL4-2025-DIGITAL-03",
                    "deadline": "2026-01-20",
                    "partners": 6
                }
            ],

            "vaultmesh_organs": [
                "LAWCHAIN",
                "Ψ-Field",
                "Federation",
                "Receipts",
                "Treasury"
            ],

            "policy_alignment": [
                "AI Act (Reg 2024/1689)",
                "DORA",
                "NIS2",
                "Gaia-X",
                "EHDS"
            ],

            "files": self.files_data,

            "proof_chain": {
                "hash_algorithm": "SHA-256",
                "tree_type": "Merkle",
                "anchoring": {
                    "rfc3161_tsa": "pending",
                    "ethereum": "pending",
                    "bitcoin": "pending"
                }
            },

            "declaration": "All Funding Organs Activated. Treasury Nebula Breathing.",

            "next_horizon": {
                "milestone": "PQC Integration Submission",
                "deadline": "2025-12-15",
                "days_remaining": 39
            }
        }

        return receipt

    def save_receipt(self, receipt: Dict, dry_run: bool = False) -> Path:
        """Save receipt to VaultMesh ledger."""
        receipts_dir = Path.home() / '.vaultmesh' / 'receipts'

        if not receipts_dir.exists():
            print(f"\n⚠️ Receipt directory not found: {receipts_dir}")
            receipts_dir = self.roadmap_dir / 'proofs'
            receipts_dir.mkdir(exist_ok=True)
            print(f"  → Using fallback: {receipts_dir}")

        timestamp_str = self.timestamp.strftime("%Y%m%d%H%M%S")
        receipt_path = receipts_dir / f'genesis-roadmap-rubedo-{timestamp_str}.json'

        if dry_run:
            print(f"\n🏃 DRY RUN: Would save to {receipt_path}")
            print(json.dumps(receipt, indent=2)[:500] + "\n...")
        else:
            receipt_path.write_text(json.dumps(receipt, indent=2))
            print(f"\n✅ Genesis receipt saved: {receipt_path}")

        return receipt_path

    def generate_proof_chain_document(self, receipt: Dict, merkle_root: str,
                                      tree_levels: List[List[str]], dry_run: bool = False) -> Path:
        """Generate human-readable proof chain document."""
        doc_path = self.roadmap_dir / 'PROOF_CHAIN.md'

        doc_content = f"""# VaultMesh Funding Roadmap — Proof Chain

**Genesis Receipt:** Rubedo Seal II — Treasury Nebula Activation
**Timestamp:** {self.timestamp.isoformat()}
**Merkle Root:** `{merkle_root}`

---

## 🜂 Rubedo Genesis Block

This document provides cryptographic proof of the VaultMesh Funding Roadmap 2025-2027 at the moment of Rubedo attainment (Treasury Nebula Activation).

**What this proves:**
- All {len(self.files_data)} files in the funding roadmap existed at this timestamp
- The Merkle root cryptographically binds all files together
- Any modification to any file will change the Merkle root
- This genesis receipt can be anchored to RFC-3161 TSA and blockchain for tamper-evidence

---

## 📊 Manifest Summary

**Files:** {receipt['manifest']['files_count']}
**Total Lines:** {receipt['manifest']['total_lines']:,}
**Total Bytes:** {receipt['manifest']['total_bytes']:,}
**Merkle Root:** `{merkle_root}`

**Coverage:**
- **Proposals:** {receipt['funding_axis']['proposals']} (€{receipt['funding_axis']['total_budget_eur']})
- **Partners:** {receipt['funding_axis']['partners']} organizations across {receipt['funding_axis']['countries']} countries
- **Work Packages:** {receipt['funding_axis']['work_packages']}+
- **Validation Pilots:** {receipt['funding_axis']['pilots']}+
- **Architecture Diagrams:** {receipt['funding_axis']['diagrams']} (including meta-visualization)

---

## 📁 File Manifest (Merkle Leaves)

"""

        # Add file table
        doc_content += "| # | File | Hash (SHA-256) | Lines | Bytes |\n"
        doc_content += "|---|------|----------------|-------|-------|\n"

        for idx, file_data in enumerate(self.files_data, 1):
            doc_content += f"| {idx:2d} | `{file_data['path']}` | `{file_data['hash'][:16]}...` | {file_data['lines']:,} | {file_data['size']:,} |\n"

        # Add Merkle tree structure
        doc_content += f"""
---

## 🌳 Merkle Tree Structure

**Tree Depth:** {len(tree_levels)} levels
**Leaf Nodes:** {len(tree_levels[0])}
**Root Hash:** `{merkle_root}`

### Level-by-Level Breakdown

"""

        for level_idx, level in enumerate(tree_levels):
            if level_idx == 0:
                doc_content += f"**Level 0 (Leaves):** {len(level)} file hashes\n"
            elif level_idx == len(tree_levels) - 1:
                doc_content += f"**Level {level_idx} (Root):** `{level[0]}`\n"
            else:
                doc_content += f"**Level {level_idx}:** {len(level)} intermediate nodes\n"

        # Add verification instructions
        doc_content += f"""
---

## 🔍 Verification Instructions

### Verify File Hash
```bash
# Verify any file hasn't been modified
sha256sum funding-roadmap/diagrams/treasury-nebula-map.mmd
# Compare output to hash in manifest above
```

### Reconstruct Merkle Root
```bash
# Run genesis receipt generator
cd ~/vaultmesh-core/funding-roadmap
python3 scripts/generate_genesis_receipt.py --dry-run

# Compare output Merkle root to this document
# If roots match, all files are intact
```

### Anchor to External Timestamping
```bash
# Request RFC-3161 timestamp (when TSA integration available)
openssl ts -query -data PROOF_CHAIN.md -sha256 -out roadmap.tsq
curl -X POST https://freetsa.org/tsr -H "Content-Type: application/timestamp-query" --data-binary @roadmap.tsq -o roadmap.tsr

# Anchor Merkle root to Ethereum (when available)
# Anchor Merkle root to Bitcoin (when available)
```

---

## 📜 Genesis Receipt JSON

**Location:** `.vaultmesh/receipts/genesis-roadmap-rubedo-{self.timestamp.strftime("%Y%m%d%H%M%S")}.json`

**Kind:** `funding.roadmap.genesis`
**Milestone:** Treasury Nebula Activation
**Phase:** Rubedo (Perfection)
**Seal:** II

**Key Fields:**
```json
{{
  "manifest": {{
    "merkle_root": "{merkle_root}"
  }},
  "funding_axis": {{
    "proposals": {receipt['funding_axis']['proposals']},
    "total_budget_eur": "{receipt['funding_axis']['total_budget_eur']}",
    "partners": "{receipt['funding_axis']['partners']}",
    "timeline": "{receipt['funding_axis']['timeline']}"
  }},
  "declaration": "{receipt['declaration']}"
}}
```

Full receipt available at path above.

---

## 🎯 What This Proof Chain Guarantees

1. **Integrity:** Any modification to any file will break the Merkle root
2. **Timestamp:** This exact state existed at {self.timestamp.isoformat()}
3. **Completeness:** All {len(self.files_data)} files are accounted for in the tree
4. **Reproducibility:** Anyone can verify by recomputing file hashes
5. **Non-repudiation:** Once anchored to TSA/blockchain, this state is permanent

---

## 🌌 Treasury Nebula — Civilization Ledger Declaration

> *"All Funding Organs Activated. Treasury Nebula Breathing."*

This proof chain marks the **Rubedo attainment** of the VaultMesh Funding Roadmap 2025-2027:

- €15.8M+ orchestrated across 8 EU Horizon Europe proposals
- 20+ consortium partners mapped across 10+ countries
- 4 comprehensive architecture diagrams (including Treasury Nebula meta-visualization)
- Complete partner onboarding, LOI templates, and consortium tracking infrastructure
- Production-ready coordination protocol for civilization-scale funding federation

**Next Horizon:** PQC Integration submission (Dec 15, 2025) — 39 days

---

## 🜂 Alchemical Signature

**Phase:** Rubedo (Reddening) — Perfection Attained
**Coordinator:** VaultMesh Technologies B.V.
**Guardian:** Karol Stefanski (guardian@vaultmesh.org)
**Forged By:** Genesis Receipt Generator v1.0

**Merkle Root:** `{merkle_root}`
**Timestamp:** {self.timestamp.isoformat()}
**Receipt:** `genesis-roadmap-rubedo-{self.timestamp.strftime("%Y%m%d%H%M%S")}.json`

---

**Document Control:**
- Version: 1.0-GENESIS
- Classification: Cryptographic Proof (Public Chain)
- Owner: VaultMesh Technologies B.V.
- Purpose: Permanent ledger record of Rubedo Seal II
"""

        if dry_run:
            print(f"\n🏃 DRY RUN: Would save proof chain to {doc_path}")
            print(doc_content[:500] + "\n...")
        else:
            doc_path.write_text(doc_content)
            print(f"\n✅ Proof chain document saved: {doc_path}")

        return doc_path


def main():
    """Main execution."""
    print("=" * 70)
    print("🜂 VaultMesh Funding Roadmap — Genesis Receipt Generator")
    print("   Rubedo Seal II: Treasury Nebula Activation")
    print("=" * 70)

    # Check for dry-run flag
    dry_run = '--dry-run' in sys.argv
    if dry_run:
        print("\n🏃 DRY RUN MODE (no files will be written)\n")

    # Determine roadmap directory
    script_dir = Path(__file__).parent
    roadmap_dir = script_dir.parent

    print(f"\n📂 Roadmap directory: {roadmap_dir}")

    # Initialize generator
    genesis = FundingRoadmapGenesis(roadmap_dir)

    # Scan files
    files_data = genesis.scan_files()

    if not files_data:
        print("\n❌ No files found in roadmap directory")
        return 1

    # Build Merkle tree
    merkle_root, tree_levels = genesis.build_merkle_tree()

    # Generate genesis receipt
    receipt = genesis.generate_genesis_receipt(merkle_root)

    # Save receipt
    receipt_path = genesis.save_receipt(receipt, dry_run=dry_run)

    # Generate proof chain document
    proof_path = genesis.generate_proof_chain_document(
        receipt, merkle_root, tree_levels, dry_run=dry_run
    )

    # Summary
    print("\n" + "=" * 70)
    print("✨ GENESIS COMPLETE")
    print("=" * 70)
    print(f"📊 Files processed: {len(files_data)}")
    print(f"📏 Total lines: {receipt['manifest']['total_lines']:,}")
    print(f"💾 Total bytes: {receipt['manifest']['total_bytes']:,}")
    print(f"🌳 Merkle root: {merkle_root[:32]}...")
    print(f"🜂 Genesis receipt: {receipt_path.name}")
    print(f"📜 Proof chain: {proof_path.name}")
    print(f"⏰ Timestamp: {genesis.timestamp.isoformat()}")

    if not dry_run:
        print("\n🎯 Next steps:")
        print(f"  1. Review: cat {proof_path}")
        print(f"  2. Verify: sha256sum {roadmap_dir}/**/*.md")
        print(f"  3. Archive: cp {receipt_path} ~/backups/")
        print("  4. Anchor: [TSA/Ethereum/Bitcoin when available]")

    print("\n🌌 Treasury Nebula: BREATHING")
    print("=" * 70)

    return 0


if __name__ == '__main__':
    sys.exit(main())
@@ -0,0 +1,77 @@
#!/usr/bin/env python3
import csv
import argparse
import datetime as dt
from collections import defaultdict


def parse_args():
    p = argparse.ArgumentParser(
        description="Generate consortium summary markdown from CSV"
    )
    p.add_argument(
        "--csv",
        default="consortium/consortium-tracker.csv",
        help="Path to consortium-tracker.csv",
    )
    return p.parse_args()
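
# Example invocation (illustrative; the path matches the default above and the
# call made by scripts/package_horizon.sh):
#
#   python3 scripts/generate_summary.py \
#       --csv consortium/consortium-tracker.csv > consortium-summary.md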


def to_float(val: str) -> float:
    try:
        return float(str(val).replace(",", "").strip())
    except Exception:
        return 0.0


def main():
    args = parse_args()

    proposals = defaultdict(list)
    with open(args.csv, newline="", encoding="utf-8") as f:
        reader = csv.DictReader(f)
        for row in reader:
            track = row.get("Proposal Track", "Unknown") or "Unknown"
            proposals[track].append(row)

    today = dt.date.today().isoformat()
    print("# Consortium Summary\n")
    print(f"Generated: {today}\n")

    grand_total = 0.0
    for track in sorted(proposals.keys()):
        rows = proposals[track]
        partners = [r for r in rows if r.get("Partner Name") and r["Partner Name"] != "[Template Row]"]
        if not partners:
            continue

        approved = sum(1 for r in partners if r.get("LOI Status", "").lower() == "approved")
        pending = sum(1 for r in partners if r.get("LOI Status", "").lower() == "pending")
        under_review = sum(1 for r in partners if r.get("LOI Status", "").lower() == "under review")

        total_budget = sum(to_float(r.get("Budget (€)", 0)) for r in partners)
        grand_total += total_budget

        print(f"## {track}")
        print(f"Partners: {len(partners)} | LOIs Approved: {approved} | Under Review: {under_review} | Pending: {pending}")
        print(f"Total Budget (reported): €{total_budget:,.0f}\n")
        print("| Partner | Country | Type | Budget (€) | PM | LOI | Contact | Email |")
        print("|---|---|---:|---:|---:|---|---|---|")
        for r in partners:
            name = r.get("Partner Name", "")
            country = r.get("Country", "")
            ptype = r.get("Partner Type", "")
            budget = r.get("Budget (€)", "")
            pm = r.get("Person-Months", "")
            loi = r.get("LOI Status", "")
            contact = r.get("Primary Contact", "")
            email = r.get("Email", "")
            print(f"| {name} | {country} | {ptype} | {budget} | {pm} | {loi} | {contact} | {email} |")
        print()

    print(f"---\n\nGrand Total Budget (reported): €{grand_total:,.0f}")


if __name__ == "__main__":
    main()
@@ -0,0 +1,147 @@
#!/usr/bin/env bash
set -euo pipefail

usage() {
  cat <<EOF
Package VaultMesh funding-roadmap deliverables for Horizon submission.

Usage:
  bash scripts/package_horizon.sh [--dest DIR] [--render] [--tar|--zip]

Options:
  --dest DIR   Destination base directory (default: "$HOME/downloads/horizon-submission")
  --render     If mermaid-cli (mmdc) is available, render PNG/SVG for diagrams
  --tar        Also create a .tar.gz archive alongside the folder (default)
  --zip        Create a .zip archive if "zip" is available
  -h, --help   Show this help

Creates structure:
  horizon-submission-YYYYMMDD/
    common/          (roadmap + deliverables summary)
    templates/
    consortium/      (tracker + generated consortium-summary.md)
    diagrams/        (.mmd + optional rendered PNG/SVG)
    MANIFEST.sha256  (if sha256sum is available)
    README.txt
EOF
}
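
# Example (illustrative; flags as documented above):
#   bash scripts/package_horizon.sh --dest /tmp/horizon --render --zip
# This produces /tmp/horizon/horizon-submission-YYYYMMDD/ plus a .zip archive.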

DEST_DEFAULT="${HOME}/downloads/horizon-submission"
ARCHIVE_FMT="tar"
RENDER="0"

DEST="$DEST_DEFAULT"
while [[ $# -gt 0 ]]; do
  case "$1" in
    --dest)
      DEST="$2"; shift 2 ;;
    --render)
      RENDER="1"; shift ;;
    --zip)
      ARCHIVE_FMT="zip"; shift ;;
    --tar)
      ARCHIVE_FMT="tar"; shift ;;
    -h|--help)
      usage; exit 0 ;;
    *)
      echo "Unknown option: $1" >&2; usage; exit 1 ;;
  esac
done

# Resolve repository root (this script is under funding-roadmap/scripts)
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ROOT_DIR="$(cd "${SCRIPT_DIR}/.." && pwd)"

DATE="$(date +%Y%m%d)"
PKG_DIR="${DEST}/horizon-submission-${DATE}"

echo "Packaging to: ${PKG_DIR}"
mkdir -p "${PKG_DIR}/"{common,templates,consortium,diagrams}

# Copy core docs
for f in "${ROOT_DIR}/VaultMesh_Funding_Roadmap_2025-2027.md" \
         "${ROOT_DIR}/DELIVERABLES_COMPLETE.md"; do
  if [[ -f "$f" ]]; then
    cp "$f" "${PKG_DIR}/common/"
  fi
done

# Templates
cp "${ROOT_DIR}/templates/"*.md "${PKG_DIR}/templates/" 2>/dev/null || true

# Consortium tracker + README + generated summary
cp "${ROOT_DIR}/consortium/consortium-tracker.csv" "${PKG_DIR}/consortium/"
cp "${ROOT_DIR}/consortium/README.md" "${PKG_DIR}/consortium/"

if command -v python3 >/dev/null 2>&1; then
  python3 "${ROOT_DIR}/scripts/generate_summary.py" \
    --csv "${ROOT_DIR}/consortium/consortium-tracker.csv" \
    > "${PKG_DIR}/consortium/consortium-summary.md"
else
  printf "python3 not found; skipping consortium-summary.md\n" >&2
fi

# Diagrams (.mmd + README)
cp "${ROOT_DIR}/diagrams/"*.mmd "${PKG_DIR}/diagrams/"
cp "${ROOT_DIR}/diagrams/README.md" "${PKG_DIR}/diagrams/"

# Optional rendering to PNG/SVG if mermaid-cli is present and --render given
if [[ "$RENDER" = "1" ]] && command -v mmdc >/dev/null 2>&1; then
  echo "Rendering diagrams to PNG/SVG via mermaid-cli..."
  pushd "${PKG_DIR}/diagrams" >/dev/null
  for file in *.mmd; do
    base="${file%.mmd}"
    mmdc -i "$file" -o "${base}.png" -w 3000 -b transparent || true
    mmdc -i "$file" -o "${base}.svg" || true
  done
  popd >/dev/null
else
  if [[ "$RENDER" = "1" ]]; then
    echo "Note: --render requested but mermaid-cli (mmdc) not found; skipping render." >&2
  fi
fi

# Manifest of file hashes if available (excludes the manifest file itself)
if command -v sha256sum >/dev/null 2>&1; then
  (cd "${PKG_DIR}" && find . -type f ! -name 'MANIFEST.sha256' -print0 | sort -z | xargs -0 sha256sum) \
    > "${PKG_DIR}/MANIFEST.sha256"
fi
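
# Illustrative check of a finished package (README.txt is written after the
# manifest, so it is not listed there):
#   (cd "${PKG_DIR}" && sha256sum -c MANIFEST.sha256)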

# README for the package
cat > "${PKG_DIR}/README.txt" <<'TXT'
Horizon Submission Package
--------------------------

Contents:
- common/      Funding roadmap and deliverables summary
- templates/   Letter of Intent + Partner Onboarding 1‑pager
- consortium/  Tracker CSV + README + generated consortium-summary.md
- diagrams/    Mermaid (.mmd) + README (+ PNG/SVG if rendered)

Notes:
- Review consortium-summary.md before external sharing (contains contacts/emails).
- Render diagrams if needed: install mermaid-cli (mmdc) and re-run with --render.
- Archive integrity: see MANIFEST.sha256 (if generated).

Coordinator contact: guardian@vaultmesh.org
TXT

# Optional archive alongside folder
case "$ARCHIVE_FMT" in
  tar)
    ARCHIVE_PATH="${PKG_DIR}.tar.gz"
    (cd "${DEST}" && tar czf "$(basename "${ARCHIVE_PATH}")" "$(basename "${PKG_DIR}")")
    echo "Created archive: ${ARCHIVE_PATH}"
    ;;
  zip)
    if command -v zip >/dev/null 2>&1; then
      (cd "${DEST}" && zip -qr "$(basename "${PKG_DIR}").zip" "$(basename "${PKG_DIR}")")
      echo "Created archive: ${PKG_DIR}.zip"
    else
      echo "zip not found; skipping .zip archive" >&2
    fi
    ;;
esac

echo "Package ready: ${PKG_DIR}"