- Complete Cloudflare Terraform configuration (DNS, WAF, tunnels, access)
- WAF Intelligence MCP server with threat analysis and ML classification
- GitOps automation with PR workflows and drift detection
- Observatory monitoring stack with Prometheus/Grafana
- IDE operator rules for governed development
- Security playbooks and compliance frameworks
- Autonomous remediation and state reconciliation
409 lines · 14 KiB · Python
#!/usr/bin/env python3
"""
Cloudflare State Reconciler

Fetches live Cloudflare configuration and produces cryptographically verifiable snapshots.

Usage:
    python3 state-reconciler.py --zone-id <ZONE_ID> --account-id <ACCOUNT_ID>

Environment Variables:
    CLOUDFLARE_API_TOKEN  - API token with read permissions
    CLOUDFLARE_ZONE_ID    - Zone ID (optional, can use --zone-id)
    CLOUDFLARE_ACCOUNT_ID - Account ID (optional, can use --account-id)

Output:
    - snapshots/cloudflare-<timestamp>.json
    - receipts/cf-state-<timestamp>.json
"""

import argparse
import hashlib
import json
import os
import sys
from datetime import datetime, timezone
from typing import Any, Dict, List, Optional

import requests

# Configuration
CF_API_BASE = "https://api.cloudflare.com/client/v4"
SNAPSHOT_DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), "snapshots")
RECEIPT_DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), "receipts")


class CloudflareClient:
    """Cloudflare API client for state fetching."""

    def __init__(self, api_token: str):
        self.api_token = api_token
        self.session = requests.Session()
        self.session.headers.update({
            "Authorization": f"Bearer {api_token}",
            "Content-Type": "application/json"
        })

    def _request(self, method: str, endpoint: str, **kwargs) -> Dict[str, Any]:
        """Make API request with error handling."""
        url = f"{CF_API_BASE}{endpoint}"
        response = self.session.request(method, url, **kwargs)
        response.raise_for_status()
        data = response.json()
        if not data.get("success", False):
            errors = data.get("errors", [])
            raise Exception(f"Cloudflare API error: {errors}")
        return data

    def _paginate(self, endpoint: str) -> List[Dict[str, Any]]:
        """Fetch all pages of a paginated endpoint."""
        results = []
        page = 1
        per_page = 100

        while True:
            data = self._request("GET", endpoint, params={"page": page, "per_page": per_page})
            results.extend(data.get("result", []))
            result_info = data.get("result_info", {})
            total_pages = result_info.get("total_pages", 1)
            if page >= total_pages:
                break
            page += 1

        return results

    # DNS
    def get_dns_records(self, zone_id: str) -> List[Dict[str, Any]]:
        """Fetch all DNS records for a zone."""
        return self._paginate(f"/zones/{zone_id}/dns_records")

    def get_dnssec(self, zone_id: str) -> Dict[str, Any]:
        """Fetch DNSSEC status for a zone."""
        data = self._request("GET", f"/zones/{zone_id}/dnssec")
        return data.get("result", {})

    # Zone Settings
    def get_zone_settings(self, zone_id: str) -> List[Dict[str, Any]]:
        """Fetch all zone settings."""
        data = self._request("GET", f"/zones/{zone_id}/settings")
        return data.get("result", [])

    def get_zone_info(self, zone_id: str) -> Dict[str, Any]:
        """Fetch zone information."""
        data = self._request("GET", f"/zones/{zone_id}")
        return data.get("result", {})

    # WAF / Firewall
    def get_firewall_rules(self, zone_id: str) -> List[Dict[str, Any]]:
        """Fetch firewall rules."""
        return self._paginate(f"/zones/{zone_id}/firewall/rules")

    def get_rulesets(self, zone_id: str) -> List[Dict[str, Any]]:
        """Fetch zone rulesets."""
        data = self._request("GET", f"/zones/{zone_id}/rulesets")
        return data.get("result", [])

    # Access
    def get_access_apps(self, account_id: str) -> List[Dict[str, Any]]:
        """Fetch Access applications."""
        return self._paginate(f"/accounts/{account_id}/access/apps")

    def get_access_policies(self, account_id: str, app_id: str) -> List[Dict[str, Any]]:
        """Fetch policies for an Access application."""
        return self._paginate(f"/accounts/{account_id}/access/apps/{app_id}/policies")

    # Tunnels
    def get_tunnels(self, account_id: str) -> List[Dict[str, Any]]:
        """Fetch Cloudflare Tunnels."""
        return self._paginate(f"/accounts/{account_id}/cfd_tunnel")

    def get_tunnel_connections(self, account_id: str, tunnel_id: str) -> List[Dict[str, Any]]:
        """Fetch tunnel connections."""
        data = self._request("GET", f"/accounts/{account_id}/cfd_tunnel/{tunnel_id}/connections")
        return data.get("result", [])

    # Logpush
    def get_logpush_jobs(self, zone_id: str) -> List[Dict[str, Any]]:
        """Fetch Logpush jobs."""
        data = self._request("GET", f"/zones/{zone_id}/logpush/jobs")
        return data.get("result", [])

    # API Tokens (metadata only)
    def get_api_tokens(self) -> List[Dict[str, Any]]:
        """Fetch API token metadata (not secrets)."""
        data = self._request("GET", "/user/tokens")
        return data.get("result", [])


def compute_sha256(data: Any) -> str:
    """Compute SHA-256 hash of JSON-serialized data."""
    serialized = json.dumps(data, sort_keys=True, separators=(",", ":"))
    return hashlib.sha256(serialized.encode()).hexdigest()
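
# Note: the canonical serialization above (sorted keys, compact separators)
# makes the digest deterministic: logically identical state hashes to the same
# value regardless of the key order returned by the API.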


def compute_merkle_root(hashes: List[str]) -> str:
    """Compute Merkle root from list of hashes."""
    if not hashes:
        return hashlib.sha256(b"").hexdigest()

    # Pad to a power of 2 by duplicating the last leaf
    while len(hashes) & (len(hashes) - 1) != 0:
        hashes.append(hashes[-1])

    # Combine pairwise until a single root remains
    while len(hashes) > 1:
        new_level = []
        for i in range(0, len(hashes), 2):
            combined = hashes[i] + hashes[i + 1]
            new_level.append(hashlib.sha256(combined.encode()).hexdigest())
        hashes = new_level

    return hashes[0]
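

# A minimal verification sketch (illustrative; not called anywhere in this
# script): a consumer of a snapshot can recompute the per-section hashes and
# the Merkle root and compare them against the recorded integrity block.
def verify_snapshot(snapshot: Dict[str, Any]) -> bool:
    """Recompute a snapshot's integrity data and compare with the stored values."""
    state = snapshot.get("state", {})
    integrity = snapshot.get("integrity", {})
    recomputed = compute_state_hashes(state)
    if recomputed != integrity.get("section_hashes", {}):
        return False
    return compute_merkle_root(list(recomputed.values())) == integrity.get("merkle_root")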


def normalize_dns_record(record: Dict[str, Any]) -> Dict[str, Any]:
    """Normalize DNS record for consistent hashing."""
    return {
        "id": record.get("id"),
        "type": record.get("type"),
        "name": record.get("name"),
        "content": record.get("content"),
        "proxied": record.get("proxied"),
        "ttl": record.get("ttl"),
        "priority": record.get("priority"),
        "created_on": record.get("created_on"),
        "modified_on": record.get("modified_on"),
    }


def normalize_tunnel(tunnel: Dict[str, Any]) -> Dict[str, Any]:
    """Normalize tunnel for consistent hashing."""
    return {
        "id": tunnel.get("id"),
        "name": tunnel.get("name"),
        "status": tunnel.get("status"),
        "created_at": tunnel.get("created_at"),
        "deleted_at": tunnel.get("deleted_at"),
        "remote_config": tunnel.get("remote_config"),
    }


def normalize_access_app(app: Dict[str, Any]) -> Dict[str, Any]:
    """Normalize Access app for consistent hashing."""
    return {
        "id": app.get("id"),
        "name": app.get("name"),
        "domain": app.get("domain"),
        "type": app.get("type"),
        "session_duration": app.get("session_duration"),
        "auto_redirect_to_identity": app.get("auto_redirect_to_identity"),
        "created_at": app.get("created_at"),
        "updated_at": app.get("updated_at"),
    }
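
# Note: the normalizers above keep a fixed subset of fields per object, so
# additional or volatile fields in API responses do not perturb the section
# hashes computed over the snapshot.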


def fetch_cloudflare_state(
    client: CloudflareClient,
    zone_id: str,
    account_id: str
) -> Dict[str, Any]:
    """Fetch complete Cloudflare state."""

    state = {
        "metadata": {
            "zone_id": zone_id,
            "account_id": account_id,
            "fetched_at": datetime.now(timezone.utc).isoformat(),
            "schema_version": "cf_state_v1",
        },
        "dns": {},
        "zone_settings": {},
        "waf": {},
        "access": {},
        "tunnels": {},
        "logpush": {},
        "api_tokens": {},
    }

    print("Fetching zone info...")
    state["zone_info"] = client.get_zone_info(zone_id)

    print("Fetching DNS records...")
    raw_dns = client.get_dns_records(zone_id)
    state["dns"]["records"] = [normalize_dns_record(r) for r in raw_dns]
    state["dns"]["dnssec"] = client.get_dnssec(zone_id)

    print("Fetching zone settings...")
    settings = client.get_zone_settings(zone_id)
    state["zone_settings"] = {s["id"]: s["value"] for s in settings}

    print("Fetching firewall rules...")
    state["waf"]["firewall_rules"] = client.get_firewall_rules(zone_id)
    state["waf"]["rulesets"] = client.get_rulesets(zone_id)

    print("Fetching Access apps...")
    access_apps = client.get_access_apps(account_id)
    state["access"]["apps"] = []
    for app in access_apps:
        normalized = normalize_access_app(app)
        normalized["policies"] = client.get_access_policies(account_id, app["id"])
        state["access"]["apps"].append(normalized)

    print("Fetching tunnels...")
    tunnels = client.get_tunnels(account_id)
    state["tunnels"]["list"] = []
    for tunnel in tunnels:
        normalized = normalize_tunnel(tunnel)
        if tunnel.get("status") != "deleted":
            normalized["connections"] = client.get_tunnel_connections(account_id, tunnel["id"])
        state["tunnels"]["list"].append(normalized)

    print("Fetching Logpush jobs...")
    state["logpush"]["jobs"] = client.get_logpush_jobs(zone_id)

    print("Fetching API token metadata...")
    tokens = client.get_api_tokens()
    # Remove sensitive fields
    state["api_tokens"]["list"] = [
        {
            "id": t.get("id"),
            "name": t.get("name"),
            "status": t.get("status"),
            "issued_on": t.get("issued_on"),
            "modified_on": t.get("modified_on"),
            "not_before": t.get("not_before"),
            "expires_on": t.get("expires_on"),
        }
        for t in tokens
    ]

    return state


def compute_state_hashes(state: Dict[str, Any]) -> Dict[str, str]:
    """Compute per-section hashes."""
    sections = ["dns", "zone_settings", "waf", "access", "tunnels", "logpush", "api_tokens"]
    hashes = {}

    for section in sections:
        if section in state:
            hashes[section] = compute_sha256(state[section])

    return hashes


def create_snapshot(state: Dict[str, Any], section_hashes: Dict[str, str], merkle_root: str) -> Dict[str, Any]:
    """Create complete snapshot with integrity data."""
    return {
        "snapshot_version": "1.0.0",
        "created_at": datetime.now(timezone.utc).isoformat(),
        "state": state,
        "integrity": {
            "section_hashes": section_hashes,
            "merkle_root": merkle_root,
            "hash_algorithm": "sha256",
        }
    }
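
# Resulting snapshot layout (illustrative values):
# {
#   "snapshot_version": "1.0.0",
#   "created_at": "<ISO-8601 timestamp>",
#   "state": {"metadata": {...}, "dns": {...}, "waf": {...}, ...},
#   "integrity": {
#     "section_hashes": {"dns": "<sha256>", ...},
#     "merkle_root": "<sha256>",
#     "hash_algorithm": "sha256"
#   }
# }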


def create_receipt(
    snapshot_path: str,
    merkle_root: str,
    zone_id: str,
    account_id: str
) -> Dict[str, Any]:
    """Create VaultMesh receipt for state snapshot."""
    return {
        "receipt_type": "cf_state_snapshot",
        "schema_version": "vm_cf_snapshot_v1",
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "zone_id": zone_id,
        "account_id": account_id,
        "snapshot_path": snapshot_path,
        "merkle_root": merkle_root,
        "hash_algorithm": "sha256",
    }


def main():
    parser = argparse.ArgumentParser(description="Cloudflare State Reconciler")
    parser.add_argument("--zone-id", default=os.environ.get("CLOUDFLARE_ZONE_ID"),
                        help="Cloudflare Zone ID")
    parser.add_argument("--account-id", default=os.environ.get("CLOUDFLARE_ACCOUNT_ID"),
                        help="Cloudflare Account ID")
    parser.add_argument("--output-dir", default=SNAPSHOT_DIR,
                        help="Output directory for snapshots")
    parser.add_argument("--receipt-dir", default=RECEIPT_DIR,
                        help="Output directory for receipts")
    args = parser.parse_args()

    # Validate inputs
    api_token = os.environ.get("CLOUDFLARE_API_TOKEN")
    if not api_token:
        print("Error: CLOUDFLARE_API_TOKEN environment variable required", file=sys.stderr)
        sys.exit(1)

    if not args.zone_id:
        print("Error: Zone ID required (--zone-id or CLOUDFLARE_ZONE_ID)", file=sys.stderr)
        sys.exit(1)

    if not args.account_id:
        print("Error: Account ID required (--account-id or CLOUDFLARE_ACCOUNT_ID)", file=sys.stderr)
        sys.exit(1)

    # Ensure output directories exist
    os.makedirs(args.output_dir, exist_ok=True)
    os.makedirs(args.receipt_dir, exist_ok=True)

    # Initialize client
    client = CloudflareClient(api_token)

    # Fetch state
    print(f"Fetching Cloudflare state for zone {args.zone_id}...")
    state = fetch_cloudflare_state(client, args.zone_id, args.account_id)

    # Compute hashes
    print("Computing integrity hashes...")
    section_hashes = compute_state_hashes(state)
    merkle_root = compute_merkle_root(list(section_hashes.values()))

    # Create snapshot
    snapshot = create_snapshot(state, section_hashes, merkle_root)

    # Write snapshot
    timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H-%M-%SZ")
    snapshot_filename = f"cloudflare-{timestamp}.json"
    snapshot_path = os.path.join(args.output_dir, snapshot_filename)

    with open(snapshot_path, "w") as f:
        json.dump(snapshot, f, indent=2, sort_keys=True)

    print(f"Snapshot written to: {snapshot_path}")

    # Create and write receipt
    receipt = create_receipt(snapshot_path, merkle_root, args.zone_id, args.account_id)
    receipt_filename = f"cf-state-{timestamp}.json"
    receipt_path = os.path.join(args.receipt_dir, receipt_filename)

    with open(receipt_path, "w") as f:
        json.dump(receipt, f, indent=2, sort_keys=True)

    print(f"Receipt written to: {receipt_path}")

    # Summary
    print("\n=== State Reconciler Summary ===")
    print(f"Zone ID: {args.zone_id}")
    print(f"Account ID: {args.account_id}")
    print(f"Merkle Root: {merkle_root}")
    print(f"DNS Records: {len(state['dns'].get('records', []))}")
    print(f"Access Apps: {len(state['access'].get('apps', []))}")
    print(f"Tunnels: {len(state['tunnels'].get('list', []))}")
    print(f"Snapshot: {snapshot_filename}")
    print(f"Receipt: {receipt_filename}")

    # Output merkle root for piping
    print(f"\nMERKLE_ROOT={merkle_root}")

    return 0


if __name__ == "__main__":
    sys.exit(main())
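
# Example invocation (illustrative; IDs are placeholders):
#   CLOUDFLARE_API_TOKEN=<token> python3 state-reconciler.py \
#       --zone-id <ZONE_ID> --account-id <ACCOUNT_ID>
# The trailing MERKLE_ROOT=<hex> line is printed so the root can be piped into
# downstream tooling.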