- Complete Cloudflare Terraform configuration (DNS, WAF, tunnels, access)
- WAF Intelligence MCP server with threat analysis and ML classification
- GitOps automation with PR workflows and drift detection
- Observatory monitoring stack with Prometheus/Grafana
- IDE operator rules for governed development
- Security playbooks and compliance frameworks
- Autonomous remediation and state reconciliation
#!/usr/bin/env python3
"""
Cloudflare Invariant Checker

Tests state snapshots against defined invariants and produces anomaly reports.

Usage:
    python3 invariant-checker.py --snapshot <path/to/snapshot.json>

Environment Variables:
    MANIFEST_PATH         - Path to DNS manifest (optional)
    TERRAFORM_STATE_PATH  - Path to Terraform state (optional)

Output:
    - anomalies/invariant-report-<timestamp>.json
    - Exit code 0 if all pass, 1 if any fail
"""

import argparse
import hashlib
import json
import os
import sys
from datetime import datetime, timezone
from typing import Any, Dict, List, Optional, Tuple

ANOMALY_DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), "anomalies")
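
# Illustrative shape of the snapshot JSON this checker consumes -- a sketch
# inferred from the fields read by the checks below, not a definitive schema;
# the real exporter may include additional keys:
#
#   {
#     "state": {
#       "dns": {"records": [...], "dnssec": {"status": "active"}},
#       "waf": {"rulesets": [...], "firewall_rules": [...]},
#       "access": {"apps": [...]},
#       "tunnels": {"list": [...]},
#       "zone_settings": {"ssl": "strict", "min_tls_version": "1.2", ...}
#     }
#   }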


class InvariantResult:
    """Result of an invariant check."""

    def __init__(self, name: str, passed: bool, message: str, details: Optional[Dict] = None):
        self.name = name
        self.passed = passed
        self.message = message
        self.details = details or {}

    def to_dict(self) -> Dict[str, Any]:
        return {
            "invariant": self.name,
            "passed": self.passed,
            "message": self.message,
            "details": self.details,
        }


class InvariantChecker:
    """Checks Cloudflare state against defined invariants."""

    def __init__(self, snapshot: Dict[str, Any], manifest: Optional[Dict] = None, tf_state: Optional[Dict] = None):
        self.snapshot = snapshot
        self.state = snapshot.get("state", {})
        self.manifest = manifest
        self.tf_state = tf_state
        self.results: List[InvariantResult] = []

    def check_all(self) -> List[InvariantResult]:
        """Run all invariant checks."""
        self._check_dns_invariants()
        self._check_waf_invariants()
        self._check_access_invariants()
        self._check_tunnel_invariants()
        self._check_zone_settings_invariants()
        if self.manifest:
            self._check_manifest_drift()
        return self.results

    # === DNS Invariants ===

    def _check_dns_invariants(self):
        """Check DNS-related invariants."""
        dns = self.state.get("dns", {})
        records = dns.get("records", [])

        # INV-DNS-001: No unproxied A/AAAA records (unless explicitly internal)
        unproxied = [
            r for r in records
            if r.get("type") in ("A", "AAAA")
            and not r.get("proxied", False)
            and not r.get("name", "").startswith("_")  # Allow service records
        ]
        self.results.append(InvariantResult(
            "INV-DNS-001",
            len(unproxied) == 0,
            "No unproxied A/AAAA records" if len(unproxied) == 0 else f"Found {len(unproxied)} unproxied A/AAAA records",
            {"unproxied_records": [r.get("name") for r in unproxied]}
        ))

        # INV-DNS-002: DNSSEC must be enabled
        dnssec = dns.get("dnssec", {})
        dnssec_enabled = dnssec.get("status") == "active"
        self.results.append(InvariantResult(
            "INV-DNS-002",
            dnssec_enabled,
            "DNSSEC is active" if dnssec_enabled else "DNSSEC is not active",
            {"dnssec_status": dnssec.get("status")}
        ))

        # INV-DNS-003: SPF record must exist
        spf_records = [r for r in records if r.get("type") == "TXT" and "v=spf1" in r.get("content", "")]
        self.results.append(InvariantResult(
            "INV-DNS-003",
            len(spf_records) > 0,
            "SPF record exists" if len(spf_records) > 0 else "No SPF record found",
            {"spf_count": len(spf_records)}
        ))

        # INV-DNS-004: DMARC record must exist
        dmarc_records = [r for r in records if r.get("name", "").startswith("_dmarc") and r.get("type") == "TXT"]
        self.results.append(InvariantResult(
            "INV-DNS-004",
            len(dmarc_records) > 0,
            "DMARC record exists" if len(dmarc_records) > 0 else "No DMARC record found",
            {"dmarc_count": len(dmarc_records)}
        ))

        # INV-DNS-005: No wildcard records (unless explicitly allowed)
        wildcards = [r for r in records if "*" in r.get("name", "")]
        self.results.append(InvariantResult(
            "INV-DNS-005",
            len(wildcards) == 0,
            "No wildcard records" if len(wildcards) == 0 else f"Found {len(wildcards)} wildcard records",
            {"wildcard_records": [r.get("name") for r in wildcards]}
        ))

    # === WAF Invariants ===

    def _check_waf_invariants(self):
        """Check WAF-related invariants."""
        waf = self.state.get("waf", {})
        rulesets = waf.get("rulesets", [])

        # INV-WAF-001: Managed ruleset must be enabled
        managed_rulesets = [rs for rs in rulesets if rs.get("kind") == "managed"]
        self.results.append(InvariantResult(
            "INV-WAF-001",
            len(managed_rulesets) > 0,
            "Managed WAF ruleset enabled" if len(managed_rulesets) > 0 else "No managed WAF ruleset found",
            {"managed_ruleset_count": len(managed_rulesets)}
        ))

        # INV-WAF-002: Firewall rules must exist
        firewall_rules = waf.get("firewall_rules", [])
        self.results.append(InvariantResult(
            "INV-WAF-002",
            len(firewall_rules) > 0,
            f"Found {len(firewall_rules)} firewall rules" if len(firewall_rules) > 0 else "No firewall rules configured",
            {"firewall_rule_count": len(firewall_rules)}
        ))

    # === Zone Settings Invariants ===

    def _check_zone_settings_invariants(self):
        """Check zone settings invariants."""
        settings = self.state.get("zone_settings", {})

        # INV-ZONE-001: TLS must be strict
        ssl_mode = settings.get("ssl")
        self.results.append(InvariantResult(
            "INV-ZONE-001",
            ssl_mode in ("strict", "full_strict"),
            f"TLS mode is {ssl_mode}" if ssl_mode in ("strict", "full_strict") else f"TLS mode is {ssl_mode}, should be strict",
            {"ssl_mode": ssl_mode}
        ))

        # INV-ZONE-002: Minimum TLS version must be 1.2+
        min_tls = settings.get("min_tls_version")
        valid_tls = min_tls in ("1.2", "1.3")
        self.results.append(InvariantResult(
            "INV-ZONE-002",
            valid_tls,
            f"Minimum TLS version is {min_tls}" if valid_tls else f"Minimum TLS version is {min_tls}, should be 1.2+",
            {"min_tls_version": min_tls}
        ))

        # INV-ZONE-003: Always Use HTTPS must be on
        always_https = settings.get("always_use_https") == "on"
        self.results.append(InvariantResult(
            "INV-ZONE-003",
            always_https,
            "Always Use HTTPS is enabled" if always_https else "Always Use HTTPS is disabled",
            {"always_use_https": settings.get("always_use_https")}
        ))

        # INV-ZONE-004: Browser check must be on
        browser_check = settings.get("browser_check") == "on"
        self.results.append(InvariantResult(
            "INV-ZONE-004",
            browser_check,
            "Browser Check is enabled" if browser_check else "Browser Check is disabled",
            {"browser_check": settings.get("browser_check")}
        ))

    # === Access Invariants ===

    def _check_access_invariants(self):
        """Check Zero Trust Access invariants."""
        access = self.state.get("access", {})
        apps = access.get("apps", [])

        # INV-ACCESS-001: All Access apps must have at least one policy
        apps_without_policies = [a for a in apps if len(a.get("policies", [])) == 0]
        self.results.append(InvariantResult(
            "INV-ACCESS-001",
            len(apps_without_policies) == 0,
            "All Access apps have policies" if len(apps_without_policies) == 0 else f"{len(apps_without_policies)} apps have no policies",
            {"apps_without_policies": [a.get("name") for a in apps_without_policies]}
        ))

        # INV-ACCESS-002: No Access app in bypass mode
        bypass_apps = [a for a in apps if any(
            p.get("decision") == "bypass" for p in a.get("policies", [])
        )]
        self.results.append(InvariantResult(
            "INV-ACCESS-002",
            len(bypass_apps) == 0,
            "No Access apps in bypass mode" if len(bypass_apps) == 0 else f"{len(bypass_apps)} apps have bypass policies",
            {"bypass_apps": [a.get("name") for a in bypass_apps]}
        ))

        # INV-ACCESS-003: Session duration should not exceed 24h
        long_session_apps = [
            a for a in apps
            if self._parse_duration(a.get("session_duration", "24h")) > 86400
        ]
        self.results.append(InvariantResult(
            "INV-ACCESS-003",
            len(long_session_apps) == 0,
            "All sessions <= 24h" if len(long_session_apps) == 0 else f"{len(long_session_apps)} apps have sessions > 24h",
            {"long_session_apps": [a.get("name") for a in long_session_apps]}
        ))

    def _parse_duration(self, duration: str) -> int:
        """Parse a duration string (e.g. "24h", "30m", "90s") to seconds; return 0 if unparseable."""
        if not duration:
            return 0
        try:
            if duration.endswith("h"):
                return int(duration[:-1]) * 3600
            elif duration.endswith("m"):
                return int(duration[:-1]) * 60
            elif duration.endswith("s"):
                return int(duration[:-1])
            else:
                return int(duration)
        except (ValueError, TypeError):
            return 0

    # === Tunnel Invariants ===

    def _check_tunnel_invariants(self):
        """Check Cloudflare Tunnel invariants."""
        tunnels = self.state.get("tunnels", {})
        tunnel_list = tunnels.get("list", [])

        # INV-TUN-001: All tunnels must be healthy (not deleted, has connections)
        active_tunnels = [t for t in tunnel_list if not t.get("deleted_at")]
        unhealthy = [
            t for t in active_tunnels
            if len(t.get("connections", [])) == 0
        ]
        self.results.append(InvariantResult(
            "INV-TUN-001",
            len(unhealthy) == 0,
            f"All {len(active_tunnels)} tunnels healthy" if len(unhealthy) == 0 else f"{len(unhealthy)} tunnels have no connections",
            {"unhealthy_tunnels": [t.get("name") for t in unhealthy]}
        ))

        # INV-TUN-002: No stale/orphan tunnels (deleted but still present)
        deleted_tunnels = [t for t in tunnel_list if t.get("deleted_at")]
        self.results.append(InvariantResult(
            "INV-TUN-002",
            len(deleted_tunnels) == 0,
            "No stale tunnels" if len(deleted_tunnels) == 0 else f"{len(deleted_tunnels)} deleted tunnels still present",
            {"stale_tunnels": [t.get("name") for t in deleted_tunnels]}
        ))

    # === Manifest Drift ===

    def _check_manifest_drift(self):
        """Check for drift between live state and manifest."""
        if not self.manifest:
            return

        dns = self.state.get("dns", {})
        records = dns.get("records", [])
        manifest_records = self.manifest.get("records", [])

        # Build lookup maps
        live_map = {(r.get("type"), r.get("name")): r for r in records}
        manifest_map = {(r.get("type"), r.get("name")): r for r in manifest_records}

        # Find drift
        missing_in_live = set(manifest_map.keys()) - set(live_map.keys())
        extra_in_live = set(live_map.keys()) - set(manifest_map.keys())

        # INV-DRIFT-001: All manifest records must exist in live
        self.results.append(InvariantResult(
            "INV-DRIFT-001",
            len(missing_in_live) == 0,
            "All manifest records present" if len(missing_in_live) == 0 else f"{len(missing_in_live)} records missing from live",
            {"missing_records": list(missing_in_live)}
        ))

        # INV-DRIFT-002: No unexpected records in live
        self.results.append(InvariantResult(
            "INV-DRIFT-002",
            len(extra_in_live) == 0,
            "No unexpected records" if len(extra_in_live) == 0 else f"{len(extra_in_live)} unexpected records in live",
            {"extra_records": list(extra_in_live)}
        ))
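
# Minimal programmatic usage sketch (illustrative; the snapshot values here are
# assumptions for demonstration, not taken from a real zone):
#
#   snapshot = {"state": {"zone_settings": {"ssl": "strict", "min_tls_version": "1.2"}}}
#   checker = InvariantChecker(snapshot)
#   for result in checker.check_all():
#       print(result.to_dict()["invariant"], result.passed)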


def generate_report(results: List[InvariantResult], snapshot_path: str) -> Dict[str, Any]:
    """Generate invariant check report."""
    passed = [r for r in results if r.passed]
    failed = [r for r in results if not r.passed]

    return {
        "report_type": "invariant_check",
        "schema_version": "vm_invariant_v1",
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "snapshot_path": snapshot_path,
        "summary": {
            "total": len(results),
            "passed": len(passed),
            "failed": len(failed),
            "pass_rate": len(passed) / len(results) if results else 0,
        },
        "results": [r.to_dict() for r in results],
        "failed_invariants": [r.to_dict() for r in failed],
    }


def create_anomaly_receipt(failed: List[InvariantResult], snapshot_path: str) -> Optional[Dict[str, Any]]:
    """Create VaultMesh anomaly receipt for failed invariants."""
    if not failed:
        return None

    return {
        "receipt_type": "cf_invariant_anomaly",
        "schema_version": "vm_cf_anomaly_v1",
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "snapshot_path": snapshot_path,
        "anomaly_count": len(failed),
        "anomalies": [
            {
                "invariant": r.name,
                "message": r.message,
                "details": r.details,
            }
            for r in failed
        ],
        "severity": "CRITICAL" if any(r.name.startswith("INV-DNS-002") or r.name.startswith("INV-ZONE-001") for r in failed) else "WARNING",
    }
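
# Illustrative anomaly receipt (trimmed sketch based on the fields built above;
# the values are invented examples, not real output):
#
#   {
#     "receipt_type": "cf_invariant_anomaly",
#     "schema_version": "vm_cf_anomaly_v1",
#     "anomaly_count": 1,
#     "anomalies": [{"invariant": "INV-DNS-002", "message": "DNSSEC is not active"}],
#     "severity": "CRITICAL"
#   }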


def main():
    parser = argparse.ArgumentParser(description="Cloudflare Invariant Checker")
    parser.add_argument("--snapshot", required=True, help="Path to state snapshot JSON")
    parser.add_argument("--manifest", default=os.environ.get("MANIFEST_PATH"),
                        help="Path to DNS manifest")
    parser.add_argument("--output-dir", default=ANOMALY_DIR,
                        help="Output directory for reports")
    args = parser.parse_args()

    # Load snapshot
    with open(args.snapshot) as f:
        snapshot = json.load(f)

    # Load manifest if provided
    manifest = None
    if args.manifest and os.path.exists(args.manifest):
        with open(args.manifest) as f:
            manifest = json.load(f)

    # Ensure output directory exists
    os.makedirs(args.output_dir, exist_ok=True)

    # Run checks
    print(f"Checking invariants for snapshot: {args.snapshot}")
    checker = InvariantChecker(snapshot, manifest)
    results = checker.check_all()

    # Generate report
    report = generate_report(results, args.snapshot)

    # Write report
    timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H-%M-%SZ")
    report_filename = f"invariant-report-{timestamp}.json"
    report_path = os.path.join(args.output_dir, report_filename)

    with open(report_path, "w") as f:
        json.dump(report, f, indent=2, sort_keys=True)

    print(f"Report written to: {report_path}")

    # Create anomaly receipt if failures
    failed = [r for r in results if not r.passed]
    if failed:
        anomaly_receipt = create_anomaly_receipt(failed, args.snapshot)
        anomaly_filename = f"anomaly-{timestamp}.json"
        anomaly_path = os.path.join(args.output_dir, anomaly_filename)

        with open(anomaly_path, "w") as f:
            json.dump(anomaly_receipt, f, indent=2, sort_keys=True)

        print(f"Anomaly receipt written to: {anomaly_path}")

    # Summary
    print("\n=== Invariant Check Summary ===")
    print(f"Total: {report['summary']['total']}")
    print(f"Passed: {report['summary']['passed']}")
    print(f"Failed: {report['summary']['failed']}")
    print(f"Pass Rate: {report['summary']['pass_rate']:.1%}")

    if failed:
        print("\n=== Failed Invariants ===")
        for r in failed:
            print(f"  [{r.name}] {r.message}")

    # Exit with appropriate code
    return 0 if len(failed) == 0 else 1


if __name__ == "__main__":
    sys.exit(main())