- Complete Cloudflare Terraform configuration (DNS, WAF, tunnels, access)
- WAF Intelligence MCP server with threat analysis and ML classification
- GitOps automation with PR workflows and drift detection
- Observatory monitoring stack with Prometheus/Grafana
- IDE operator rules for governed development
- Security playbooks and compliance frameworks
- Autonomous remediation and state reconciliation
183 lines
5.2 KiB
Python
#!/usr/bin/env python3
"""
Cloudflare Invariant Checker (Pure Technical)

Evaluates whether Cloudflare's live state satisfies required invariants:
- DNS integrity (proxied, no wildcards, SPF/DKIM/DMARC match manifest)
- DNSSEC + registrar lock enabled
- WAF baseline compliance
- Access policies enforce MFA and no-bypass rules
- Tunnel health and credential age
- Drift vs DNS Manifest
- Drift vs Terraform (.tf files)

Outputs:
    anomalies/cf-invariants-<ts>.json
    receipts/cf-invariants-<ts>-<hash>.json
"""
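# Example invocation (illustrative; the filename and required token scopes are
# assumptions, not specified in this file):
#
#   export CF_API_TOKEN=...    # token with read access to zones, DNS, Access, tunnels
#   export CF_ACCOUNT_ID=...
#   python cf_invariant_checker.py
#
# The anomaly report and receipt paths are printed on completion and written
# under VM_STATE_ROOT (default ./cloudflare_state).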
import os
import json
import hashlib
import requests
from datetime import datetime, timezone

CF_API = "https://api.cloudflare.com/client/v4"
CF_TOKEN = os.getenv("CF_API_TOKEN")
CF_ACCOUNT = os.getenv("CF_ACCOUNT_ID")
ROOT = os.getenv("VM_STATE_ROOT", "./cloudflare_state")
MANIFEST_PATH = os.getenv("DNS_MANIFEST", "./cloudflare_dns_manifest.json")
TF_DIR = os.getenv("TF_DIR", "./terraform")  # not used below; the Terraform drift check from the docstring is not implemented in this file

HEADERS = {
    "Authorization": f"Bearer {CF_TOKEN}",
    "Content-Type": "application/json",
}

os.makedirs(f"{ROOT}/anomalies", exist_ok=True)
os.makedirs(f"{ROOT}/receipts", exist_ok=True)
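# Optional fail-fast guard (a suggested addition, not part of the original flow):
# without a token and account id every API call below would fail with an HTTP 4xx
# anyway; this just surfaces the problem up front with a clearer message.
if not CF_TOKEN or not CF_ACCOUNT:
    raise SystemExit("CF_API_TOKEN and CF_ACCOUNT_ID must be set")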
def merkle_root(obj):
    # Despite the name, this is a flat SHA-256 over the canonical (sorted-keys)
    # JSON encoding of the whole object, not a per-leaf Merkle tree.
    return hashlib.sha256(json.dumps(obj, sort_keys=True).encode()).hexdigest()
def cf(endpoint):
    # Thin GET wrapper around the Cloudflare v4 API. Note: list endpoints are
    # paginated, so this returns only the first page of results.
    r = requests.get(f"{CF_API}{endpoint}", headers=HEADERS, timeout=30)
    r.raise_for_status()
    return r.json().get("result", {})
# -------------------------------
# Helper: Load DNS Manifest
# -------------------------------

def load_manifest():
    if not os.path.exists(MANIFEST_PATH):
        return None
    with open(MANIFEST_PATH, "r") as f:
        try:
            return json.load(f)
        except json.JSONDecodeError:
            # Treat an unreadable manifest the same as a missing one.
            return None
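# Manifest shape: the only key consumed by check_dns is "internal_records",
# a map of zone name -> list of record names allowed to remain unproxied.
# Illustrative example (values are placeholders):
#
# {
#   "internal_records": {
#     "example.com": ["vpn.example.com", "db.example.com"]
#   }
# }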
# -------------------------------
# Invariant Checks
# -------------------------------
def check_dns(zones, manifest):
    anomalies = []
    for z in zones:
        zid = z["id"]
        zname = z["name"]
        recs = cf(f"/zones/{zid}/dns_records")

        for r in recs:
            # 1 — No wildcards
            if r["name"].startswith("*"):
                anomalies.append({"zone": zname, "type": "wildcard_record", "record": r})

            # 2 — Must be proxied unless manifest says internal
            internal = False
            if manifest and zname in manifest.get("internal_records", {}):
                internal_list = manifest["internal_records"][zname]
                if r["name"] in internal_list:
                    internal = True

            if not internal and r.get("proxied") is False:
                anomalies.append({"zone": zname, "type": "unproxied_record", "record": r})

        # 3 — DNSSEC required
        dnssec = cf(f"/zones/{zid}/dnssec")
        if dnssec.get("status") != "active":
            anomalies.append({"zone": zname, "type": "dnssec_disabled"})

    return anomalies
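# The module docstring lists SPF/DKIM/DMARC verification against the manifest,
# but check_dns above does not implement it. A minimal sketch, assuming a
# hypothetical "email_records" manifest key mapping zone -> {record name:
# expected TXT content}; this helper is not wired into main():
def check_email_dns(zones, manifest):
    anomalies = []
    if not manifest:
        return anomalies
    for z in zones:
        expected = manifest.get("email_records", {}).get(z["name"], {})
        if not expected:
            continue
        recs = cf(f"/zones/{z['id']}/dns_records?type=TXT")
        live = {r["name"]: r.get("content", "") for r in recs}
        for name, content in expected.items():
            if live.get(name) != content:
                anomalies.append({"zone": z["name"], "type": "email_dns_mismatch", "record": name})
    return anomalies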
def check_zone_security(zones):
    anomalies = []
    for z in zones:
        zid = z["id"]
        settings = cf(f"/zones/{zid}/settings/security_header")
        hsts = settings.get("value", {}).get("strict_transport_security")

        if not hsts or not hsts.get("enabled"):
            anomalies.append({"zone": z["name"], "type": "hsts_disabled"})
    return anomalies
def check_waf(zones):
    anomalies = []
    for z in zones:
        zid = z["id"]
        # Legacy WAF packages endpoint; zones migrated to the newer rulesets
        # engine may report their managed rules elsewhere.
        waf = cf(f"/zones/{zid}/firewall/waf/packages")
        if not waf:
            anomalies.append({"zone": z["name"], "type": "waf_missing"})
            continue
        # Require OWASP ruleset
        if not any("owasp" in pkg.get("name", "").lower() for pkg in waf):
            anomalies.append({"zone": z["name"], "type": "owasp_ruleset_missing"})
    return anomalies
def check_access_policies():
    anomalies = []
    apps = cf(f"/accounts/{CF_ACCOUNT}/access/apps")  # fetched but not evaluated by this check
    policies = cf(f"/accounts/{CF_ACCOUNT}/access/policies")

    for p in policies:
        if p.get("decision") == "bypass":
            anomalies.append({"type": "access_policy_bypass", "policy": p})
        if not any(r.get("require_mfa") for r in p.get("rules", [])):
            anomalies.append({"type": "access_policy_missing_mfa", "policy": p})

    return anomalies
def check_tunnels():
    anomalies = []
    tunnels = cf(f"/accounts/{CF_ACCOUNT}/cfd_tunnel")

    for t in tunnels:
        if t.get("status") not in ("healthy", "active"):
            anomalies.append({"type": "tunnel_unhealthy", "tunnel": t})

    return anomalies
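# "Credential age" from the module docstring is not covered by check_tunnels: the
# tunnel listing does not expose a rotation timestamp, so this sketch approximates
# it with each tunnel's created_at field. MAX_TUNNEL_CRED_AGE_DAYS is a
# hypothetical threshold, and the helper is not wired into main().
MAX_TUNNEL_CRED_AGE_DAYS = 90

def check_tunnel_credential_age(tunnels):
    anomalies = []
    now = datetime.now(timezone.utc)
    for t in tunnels:
        created = t.get("created_at")
        if not created:
            continue
        created_at = datetime.fromisoformat(created.replace("Z", "+00:00"))
        if (now - created_at).days > MAX_TUNNEL_CRED_AGE_DAYS:
            anomalies.append({
                "type": "tunnel_credentials_stale",
                "tunnel": t.get("name"),
                "created_at": created,
            })
    return anomalies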
# -------------------------------
# Main
# -------------------------------
def main():
    anomalies = []
    ts = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

    zones = cf("/zones")
    manifest = load_manifest()

    anomalies += check_dns(zones, manifest)
    anomalies += check_zone_security(zones)
    anomalies += check_waf(zones)
    anomalies += check_access_policies()
    anomalies += check_tunnels()

    anomaly_file = f"{ROOT}/anomalies/cf-invariants-{ts}.json"
    with open(anomaly_file, "w") as f:
        json.dump(anomalies, f, indent=2)

    root = merkle_root(anomalies)
    receipt_file = f"{ROOT}/receipts/cf-invariants-{ts}-{root[:8]}.json"
    with open(receipt_file, "w") as f:
        json.dump({"ts": ts, "merkle_root": root, "anomalies_file": anomaly_file}, f, indent=2)

    print("Anomaly report:", anomaly_file)
    print("Receipt:", receipt_file)
    print("Merkle root:", root)
if __name__ == "__main__":
    main()