Initial commit: Cloudflare infrastructure with WAF Intelligence
- Complete Cloudflare Terraform configuration (DNS, WAF, tunnels, access)
- WAF Intelligence MCP server with threat analysis and ML classification
- GitOps automation with PR workflows and drift detection
- Observatory monitoring stack with Prometheus/Grafana
- IDE operator rules for governed development
- Security playbooks and compliance frameworks
- Autonomous remediation and state reconciliation
waf_intel_mcp.py (new executable file, 86 lines)
@@ -0,0 +1,86 @@
#!/usr/bin/env python3
from __future__ import annotations

import glob
from dataclasses import asdict
from typing import Any, Dict, List

from modelcontextprotocol.python import Server
from mcp.waf_intelligence.orchestrator import WAFInsight, WAFIntelligence

server = Server("waf_intel")


def _insight_to_dict(insight: WAFInsight) -> Dict[str, Any]:
    """Convert a WAFInsight dataclass into a plain dict."""
    return asdict(insight)


@server.tool()
async def analyze_waf(
    file: str | None = None,
    files: List[str] | None = None,
    limit: int = 3,
    severity_threshold: str = "warning",
) -> Dict[str, Any]:
    """
    Analyze one or more Terraform WAF files and return curated insights.

    Args:
        file: Single file path (e.g. "terraform/waf.tf").
        files: Optional list of file paths or glob patterns (e.g. ["terraform/waf*.tf"]).
        limit: Max number of high-priority insights to return.
        severity_threshold: Minimum severity to include ("info", "warning", "error").

    Returns:
        {
            "results": [
                {
                    "file": "...",
                    "insights": [ ... ]
                },
                ...
            ]
        }
    """
    paths: List[str] = []

    if files:
        for pattern in files:
            for matched in glob.glob(pattern):
                paths.append(matched)

    if file:
        paths.append(file)

    seen = set()
    unique_paths: List[str] = []
    for p in paths:
        if p not in seen:
            seen.add(p)
            unique_paths.append(p)

    if not unique_paths:
        raise ValueError("Please provide 'file' or 'files' to analyze.")

    intel = WAFIntelligence()
    results: List[Dict[str, Any]] = []

    for path in unique_paths:
        insights: List[WAFInsight] = intel.analyze_and_recommend(
            path,
            limit=limit,
            min_severity=severity_threshold,
        )
        results.append(
            {
                "file": path,
                "insights": [_insight_to_dict(insight) for insight in insights],
            }
        )

    return {"results": results}


if __name__ == "__main__":
    server.run()
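For a quick local check outside the MCP transport, the same orchestrator can be driven directly. A minimal sketch, reusing only the WAFIntelligence.analyze_and_recommend call and asdict conversion shown above; the "terraform/waf.tf" path and the "info" threshold are illustrative, not part of this commit:

#!/usr/bin/env python3
"""Ad-hoc sketch: run the WAF orchestrator without starting the MCP server."""
from dataclasses import asdict

from mcp.waf_intelligence.orchestrator import WAFIntelligence

intel = WAFIntelligence()
# Illustrative path; point this at any Terraform WAF file in the repo.
for insight in intel.analyze_and_recommend("terraform/waf.tf", limit=5, min_severity="info"):
    print(asdict(insight))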