chore: pre-migration snapshot
Layer0, MCP servers, Terraform consolidation
This commit is contained in:
308
scripts/deploy_infrastructure.sh
Normal file
308
scripts/deploy_infrastructure.sh
Normal file
@@ -0,0 +1,308 @@
|
||||
#!/bin/bash

# Cloudflare Infrastructure Deployment Automation
# Automated Terraform deployment with safety checks and rollback capabilities

# Exit on the first unhandled failure, and make pipelines fail when any
# stage fails (e.g. `terraform show | jq | sort | tee` below would otherwise
# hide a terraform/jq error behind tee's exit status).
set -eo pipefail

# ANSI color codes for terminal output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Configuration — constants for the whole run, so mark them readonly.
readonly TERRAFORM_DIR="terraform"
readonly BACKUP_DIR="terraform_backups"
readonly STATE_FILE="terraform.tfstate"
readonly PLAN_FILE="deployment_plan.tfplan"
readonly LOG_FILE="deployment_$(date +%Y%m%d_%H%M%S).log"
|
||||
|
||||
# Log a timestamped informational message to stdout and append it to the log.
log() {
    echo -e "${BLUE}[$(date +'%Y-%m-%d %H:%M:%S')]${NC} $1" | tee -a "$LOG_FILE"
}

# Log a success message to stdout and append it to the log.
success() {
    echo -e "${GREEN}✅ $1${NC}" | tee -a "$LOG_FILE"
}

# Log a warning. Diagnostics go to stderr (still recorded in the log file).
warning() {
    echo -e "${YELLOW}⚠️ $1${NC}" | tee -a "$LOG_FILE" >&2
}

# Log an error to stderr and abort the whole script with status 1.
error() {
    echo -e "${RED}❌ $1${NC}" | tee -a "$LOG_FILE" >&2
    exit 1
}
|
||||
|
||||
# Verify everything a deployment needs: the ../.env credentials file,
# the required CLOUDFLARE_* variables inside it, and a working Terraform
# binary on PATH. Aborts via error() on the first missing prerequisite.
check_prerequisites() {
    log "Checking prerequisites..."

    # Credentials must have been generated by setup_credentials.sh already.
    [[ -f "../.env" ]] || error "Missing .env file. Run setup_credentials.sh first."

    # Load CLOUDFLARE_* variables into this shell.
    source "../.env"

    [[ -n "$CLOUDFLARE_API_TOKEN" ]] || error "CLOUDFLARE_API_TOKEN not set in .env"
    [[ -n "$CLOUDFLARE_ACCOUNT_ID" ]] || error "CLOUDFLARE_ACCOUNT_ID not set in .env"

    # Terraform itself must be installed.
    command -v terraform &> /dev/null || error "Terraform not found. Please install Terraform first."

    # Record the Terraform version in the deployment log for traceability.
    TF_VERSION=$(terraform version | head -n1 | awk '{print $2}' | sed 's/v//')
    log "Terraform version: $TF_VERSION"

    success "Prerequisites check passed"
}
|
||||
|
||||
# Snapshot the current Terraform state (and tfvars, if present) into
# BACKUP_DIR so a failed deployment can be rolled back later.
backup_state() {
    log "Creating backup of current state..."

    mkdir -p "$BACKUP_DIR"

    if [[ -f "$STATE_FILE" ]]; then
        # Timestamped copy so repeated runs never overwrite each other.
        BACKUP_NAME="${BACKUP_DIR}/state_backup_$(date +%Y%m%d_%H%M%S).tfstate"
        cp "$STATE_FILE" "$BACKUP_NAME"
        success "State backed up to: $BACKUP_NAME"
    else
        warning "No existing state file found"
    fi

    # Keep a copy of the variable file as well, when one exists.
    if [[ -f "terraform.tfvars" ]]; then
        cp "terraform.tfvars" "${BACKUP_DIR}/terraform.tfvars.backup"
    fi
}
|
||||
|
||||
# Write terraform.tfvars from the credentials loaded out of ../.env.
#
# SECURITY FIX: the file contains the live API token, so it is created
# with owner-only (0600) permissions BEFORE the secret is written into
# it; the original left it at the default umask (often world-readable).
prepare_config() {
    log "Preparing Terraform configuration..."

    # Create/truncate the file and lock down permissions first.
    rm -f terraform.tfvars
    touch terraform.tfvars
    chmod 600 terraform.tfvars

    cat >> terraform.tfvars << EOF
cloudflare_api_token = "$CLOUDFLARE_API_TOKEN"
cloudflare_account_id = "$CLOUDFLARE_ACCOUNT_ID"
cloudflare_account_name = "" # Use account_id from .env
EOF

    # Zone ID is optional; only emit it when configured.
    if [[ -n "$CLOUDFLARE_ZONE_ID" ]]; then
        echo "cloudflare_zone_id = \"$CLOUDFLARE_ZONE_ID\"" >> terraform.tfvars
    fi

    success "Configuration prepared"
}
|
||||
|
||||
# Run `terraform init -upgrade`; abort the script on failure.
init_terraform() {
    log "Initializing Terraform..."
    terraform init -upgrade || error "Terraform initialization failed"
    success "Terraform initialized successfully"
}
|
||||
|
||||
# Run `terraform validate`; abort the script on failure.
validate_config() {
    log "Validating Terraform configuration..."
    terraform validate || error "Configuration validation failed"
    success "Configuration validation passed"
}
|
||||
|
||||
# Create a deployment plan with -detailed-exitcode and propagate its
# meaningful exit statuses to the caller:
#   0 - no changes required
#   2 - plan written to $PLAN_FILE, changes pending
# Any other status aborts the script.
#
# BUG FIX: the original wrapped the command in `if terraform plan ...; then
# case $? in`, but inside the `then` branch $? is always 0, and exit code 2
# ("changes present") sent execution to the `else` branch, which treated a
# perfectly good plan as a failure. The status is now captured explicitly.
create_plan() {
    log "Creating deployment plan..."

    local plan_rc=0
    terraform plan -out="$PLAN_FILE" -detailed-exitcode || plan_rc=$?

    case $plan_rc in
        0)
            success "No changes needed"
            return 0
            ;;
        2)
            success "Plan created successfully"
            return 2
            ;;
        *)
            error "Plan creation failed"
            ;;
    esac
}
|
||||
|
||||
# Print a sorted one-line-per-resource summary of the pending plan
# ("action type.name"), skipping no-op entries, and append it to the log.
show_plan_summary() {
    log "Plan Summary:"
    terraform show -json "$PLAN_FILE" \
        | jq -r '.resource_changes[]
            | select(.change.actions != ["no-op"])
            | "\(.change.actions | join(",")) \(.type).\(.name)"' \
        | sort \
        | tee -a "$LOG_FILE"
}
|
||||
|
||||
# Show the pending changes and ask the operator for explicit approval.
# Anything other than y/Y cancels the run with a clean exit (status 0).
confirm_deployment() {
    echo
    echo "=================================================="
    echo "🚀 DEPLOYMENT CONFIRMATION"
    echo "=================================================="
    echo
    echo "The following changes will be applied:"
    show_plan_summary
    echo
    echo "Log file: $LOG_FILE"
    echo "Backup directory: $BACKUP_DIR"
    echo
    local answer
    read -p "Do you want to proceed with deployment? (y/n): " -n 1 -r answer
    echo

    if [[ "$answer" =~ ^[Yy]$ ]]; then
        return 0
    fi
    log "Deployment cancelled by user"
    exit 0
}
|
||||
|
||||
# Apply the previously created plan file; abort the script on failure.
apply_deployment() {
    log "Applying deployment..."
    terraform apply "$PLAN_FILE" || error "Deployment failed"
    success "Deployment applied successfully"
}
|
||||
|
||||
# Verify the deployment by checking that Terraform produced outputs.
#
# BUG FIX: `terraform output -json` prints "{}" (a non-empty string) when
# there are no outputs, so the original `-n` test always passed. An empty
# JSON object is now also treated as "no outputs".
verify_deployment() {
    log "Verifying deployment..."

    local outputs
    outputs=$(terraform output -json)

    if [[ -n "$outputs" && "$outputs" != "{}" ]]; then
        success "Deployment verification passed"
        echo "Outputs:"
        terraform output
    else
        warning "No outputs generated - manual verification required"
    fi
}
|
||||
|
||||
# Remove the temporary plan file. Installed as an EXIT trap in main(), so
# it runs on every exit path (success, error, or user cancellation).
cleanup() {
    log "Cleaning up temporary files..."
    if [[ -f "$PLAN_FILE" ]]; then
        rm -f -- "$PLAN_FILE"
        success "Plan file removed"
    fi
}
|
||||
|
||||
# Print a post-deployment report: deployed resources, log/backup locations
# and suggested manual follow-up checks.
deployment_summary() {
    cat <<EOF

==================================================
🎉 DEPLOYMENT SUMMARY
==================================================

✅ Infrastructure deployed successfully
📋 Log file: $LOG_FILE
💾 Backups: $BACKUP_DIR
🌐 Resources deployed:
EOF
    terraform state list
    cat <<'EOF'

Next steps:
1. Check Cloudflare dashboard for deployed resources
2. Test DNS resolution for your domains
3. Verify WAF rules are active
4. Test tunnel connectivity

EOF
}
|
||||
|
||||
# Restore the most recent state backup after a failed deployment.
# Installed as the ERR trap at the bottom of the script.
#
# BUG FIXES vs. the original:
#  * The entry message used error(), which exits immediately — every line
#    after it was unreachable, so no rollback ever happened. It now uses
#    warning().
#  * The backup glob was fully quoted ("${BACKUP_DIR}/state_backup_*.tfstate"),
#    so `ls` received the literal pattern and never matched; the glob part
#    is now left unquoted so the shell expands it.
rollback() {
    warning "Deployment failed - rolling back..."

    # Newest backup first (ls -t sorts by mtime); `|| true` keeps a missing
    # backup from tripping `set -e` before we can report it properly.
    local latest_backup
    latest_backup=$(ls -t "${BACKUP_DIR}"/state_backup_*.tfstate 2>/dev/null | head -n1 || true)

    if [[ -n "$latest_backup" ]]; then
        log "Restoring from backup: $latest_backup"
        cp "$latest_backup" "$STATE_FILE"
        warning "State restored from backup. Manual verification required."
    else
        error "No backup available for rollback"
    fi
}
|
||||
|
||||
# Main deployment flow: prerequisite checks, state backup, config
# generation, init/validate, plan, operator confirmation, apply, verify.
#
# BUG FIX: the original used `if create_plan; then case $? in ...` — inside
# the `then` branch $? is always 0, and exit code 2 ("changes pending")
# failed the `if`, so the confirmation/apply path could never run. The plan
# status is now captured explicitly before being dispatched on.
main() {
    echo "🚀 Cloudflare Infrastructure Deployment"
    echo "=================================================="
    echo

    # All Terraform commands run from the module directory.
    cd "$TERRAFORM_DIR" || error "Terraform directory not found"

    # Always remove the temporary plan file, whatever the exit path.
    trap cleanup EXIT

    check_prerequisites
    backup_state
    prepare_config
    init_terraform
    validate_config

    # -detailed-exitcode semantics: 0 = nothing to do, 2 = changes pending.
    local plan_rc=0
    create_plan || plan_rc=$?

    case $plan_rc in
        0)
            success "No changes needed - infrastructure is up to date"
            exit 0
            ;;
        2)
            confirm_deployment
            apply_deployment
            verify_deployment
            deployment_summary
            ;;
        *)
            # create_plan aborts on real failures itself; this is defensive.
            error "Plan creation failed"
            ;;
    esac
}
|
||||
|
||||
# Any unhandled failure triggers a state rollback before the script dies.
trap rollback ERR

# Entry point.
main "$@"
|
||||
421
scripts/incident_response_playbooks.py
Normal file
421
scripts/incident_response_playbooks.py
Normal file
@@ -0,0 +1,421 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Cloudflare Incident Response Playbooks
|
||||
Standardized procedures for common infrastructure incidents
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from typing import Dict, List, Optional
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class IncidentSeverity(str, Enum):
    """Severity levels an incident can be classified under.

    Inherits from ``str`` so members compare equal to their plain string
    values (e.g. ``IncidentSeverity.LOW == "low"``) and serialize cleanly.
    """

    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    CRITICAL = "critical"
|
||||
|
||||
|
||||
class IncidentType(str, Enum):
    """Categories of infrastructure incidents covered by the playbooks.

    ``str`` subclass: members compare equal to (and serialize as) their
    snake_case string values.
    """

    DNS_OUTAGE = "dns_outage"
    WAF_BYPASS = "waf_bypass"
    TUNNEL_FAILURE = "tunnel_failure"
    SECURITY_BREACH = "security_breach"
    CONFIGURATION_ERROR = "configuration_error"
    PERFORMANCE_DEGRADATION = "performance_degradation"
|
||||
|
||||
@dataclass
class IncidentResponse:
    """Incident response procedure.

    A static description of how to handle one incident type: ordered
    step lists for each phase of the response, plus the escalation chain
    and a human-readable resolution-time estimate.
    """

    incident_type: IncidentType        # which incident this procedure covers
    severity: IncidentSeverity         # default severity classification
    immediate_actions: List[str]       # first-response steps, in order
    investigation_steps: List[str]     # root-cause analysis steps
    recovery_procedures: List[str]     # steps to restore normal service
    prevention_measures: List[str]     # follow-ups to avoid recurrence
    escalation_path: List[str]         # contacts, most specific first
    time_to_resolve: str               # estimate, e.g. "1-4 hours"
|
||||
|
||||
class IncidentResponsePlaybook:
|
||||
"""Collection of incident response playbooks"""
|
||||
|
||||
    def __init__(self):
        # Build the full registry of playbooks once, up front; lookups
        # afterwards are plain dict access.
        self.playbooks = self._initialize_playbooks()
|
||||
|
||||
    def _initialize_playbooks(self) -> Dict[IncidentType, IncidentResponse]:
        """Initialize all incident response playbooks.

        Pure data: returns a static mapping of every IncidentType to its
        IncidentResponse procedure. Each entry lists the ordered steps
        for the immediate / investigation / recovery / prevention phases,
        the escalation chain, and a rough resolution-time estimate.
        """
        return {
            # DNS resolution failures — HIGH: directly user-facing.
            IncidentType.DNS_OUTAGE: IncidentResponse(
                incident_type=IncidentType.DNS_OUTAGE,
                severity=IncidentSeverity.HIGH,
                immediate_actions=[
                    "Verify DNS resolution using external tools (dig, nslookup)",
                    "Check Cloudflare DNS dashboard for zone status",
                    "Review recent DNS changes in version control",
                    "Verify origin server connectivity",
                    "Check Cloudflare status page for service issues",
                ],
                investigation_steps=[
                    "Examine DNS record changes in Git history",
                    "Check Terraform state for unexpected modifications",
                    "Review Cloudflare audit logs for recent changes",
                    "Verify DNS propagation using multiple geographic locations",
                    "Check for DNSSEC configuration issues",
                ],
                recovery_procedures=[
                    "Rollback recent DNS changes using Terraform",
                    "Manually restore critical DNS records if needed",
                    "Update TTL values for faster propagation",
                    "Contact Cloudflare support if service-related",
                    "Implement traffic rerouting if necessary",
                ],
                prevention_measures=[
                    "Implement DNS change approval workflows",
                    "Use Terraform plan/apply with peer review",
                    "Monitor DNS resolution from multiple locations",
                    "Implement automated DNS health checks",
                    "Maintain backup DNS configurations",
                ],
                escalation_path=[
                    "Primary DNS Administrator",
                    "Infrastructure Team Lead",
                    "Cloudflare Support",
                    "Security Team",
                ],
                time_to_resolve="1-4 hours",
            ),
            # WAF bypass — CRITICAL: active security exposure.
            IncidentType.WAF_BYPASS: IncidentResponse(
                incident_type=IncidentType.WAF_BYPASS,
                severity=IncidentSeverity.CRITICAL,
                immediate_actions=[
                    "Immediately review WAF event logs for suspicious activity",
                    "Check for recent WAF rule modifications",
                    "Verify WAF rule package status and mode",
                    "Temporarily block suspicious IP addresses",
                    "Enable challenge mode for suspicious traffic patterns",
                ],
                investigation_steps=[
                    "Analyze WAF rule changes in version control",
                    "Review Cloudflare firewall event logs",
                    "Check for anomalous traffic patterns",
                    "Verify WAF rule effectiveness using test payloads",
                    "Examine rate limiting and threat score thresholds",
                ],
                recovery_procedures=[
                    "Rollback WAF rule changes to known good state",
                    "Implement emergency WAF rules to block attack patterns",
                    "Update threat intelligence feeds",
                    "Increase security level for affected zones",
                    "Deploy additional security measures (Bot Fight Mode, etc.)",
                ],
                prevention_measures=[
                    "Implement WAF change approval workflows",
                    "Regular security testing of WAF rules",
                    "Monitor WAF event logs for anomalies",
                    "Implement automated WAF rule validation",
                    "Regular security awareness training",
                ],
                escalation_path=[
                    "Security Incident Response Team",
                    "WAF Administrator",
                    "Infrastructure Security Lead",
                    "CISO/Management",
                ],
                time_to_resolve="2-6 hours",
            ),
            # Tunnel connectivity loss — MEDIUM: origin reachability degraded.
            IncidentType.TUNNEL_FAILURE: IncidentResponse(
                incident_type=IncidentType.TUNNEL_FAILURE,
                severity=IncidentSeverity.MEDIUM,
                immediate_actions=[
                    "Check Cloudflare Tunnel status and connectivity",
                    "Verify origin server availability and configuration",
                    "Check tunnel connector logs for errors",
                    "Restart tunnel connector service if needed",
                    "Verify DNS records point to correct tunnel endpoints",
                ],
                investigation_steps=[
                    "Review recent tunnel configuration changes",
                    "Check network connectivity between connector and Cloudflare",
                    "Examine tunnel connector resource usage",
                    "Verify certificate validity and renewal status",
                    "Check for firewall/network policy changes",
                ],
                recovery_procedures=[
                    "Restart tunnel connector with updated configuration",
                    "Rollback recent tunnel configuration changes",
                    "Recreate tunnel connector if necessary",
                    "Update DNS records to alternative endpoints",
                    "Implement traffic failover mechanisms",
                ],
                prevention_measures=[
                    "Implement tunnel health monitoring",
                    "Use redundant tunnel configurations",
                    "Regular tunnel connector updates and maintenance",
                    "Monitor certificate expiration dates",
                    "Implement automated tunnel failover",
                ],
                escalation_path=[
                    "Network Administrator",
                    "Infrastructure Team",
                    "Cloudflare Support",
                    "Security Team",
                ],
                time_to_resolve="1-3 hours",
            ),
            # Confirmed compromise — CRITICAL: containment before recovery.
            IncidentType.SECURITY_BREACH: IncidentResponse(
                incident_type=IncidentType.SECURITY_BREACH,
                severity=IncidentSeverity.CRITICAL,
                immediate_actions=[
                    "Isolate affected systems and services immediately",
                    "Preserve logs and evidence for forensic analysis",
                    "Change all relevant credentials and API tokens",
                    "Notify security incident response team",
                    "Implement emergency security controls",
                ],
                investigation_steps=[
                    "Conduct forensic analysis of compromised systems",
                    "Review Cloudflare audit logs for unauthorized access",
                    "Check for API token misuse or unauthorized changes",
                    "Examine DNS/WAF/Tunnel configuration changes",
                    "Coordinate with legal and compliance teams",
                ],
                recovery_procedures=[
                    "Rotate all Cloudflare API tokens and credentials",
                    "Restore configurations from verified backups",
                    "Implement enhanced security monitoring",
                    "Conduct post-incident security assessment",
                    "Update incident response procedures based on lessons learned",
                ],
                prevention_measures=[
                    "Implement multi-factor authentication",
                    "Regular security audits and penetration testing",
                    "Monitor for suspicious API activity",
                    "Implement least privilege access controls",
                    "Regular security awareness training",
                ],
                escalation_path=[
                    "Security Incident Response Team",
                    "CISO/Management",
                    "Legal Department",
                    "External Security Consultants",
                ],
                time_to_resolve="4-24 hours",
            ),
            # Bad config change — MEDIUM: usually recoverable via rollback.
            IncidentType.CONFIGURATION_ERROR: IncidentResponse(
                incident_type=IncidentType.CONFIGURATION_ERROR,
                severity=IncidentSeverity.MEDIUM,
                immediate_actions=[
                    "Identify the specific configuration error",
                    "Assess impact on services and users",
                    "Check version control for recent changes",
                    "Verify Terraform plan output for unexpected changes",
                    "Communicate status to stakeholders",
                ],
                investigation_steps=[
                    "Review Git commit history for configuration changes",
                    "Examine Terraform state differences",
                    "Check Cloudflare configuration against documented standards",
                    "Verify configuration consistency across environments",
                    "Identify root cause of configuration error",
                ],
                recovery_procedures=[
                    "Rollback configuration using Terraform",
                    "Apply corrected configuration changes",
                    "Verify service restoration and functionality",
                    "Update configuration documentation",
                    "Implement configuration validation checks",
                ],
                prevention_measures=[
                    "Implement configuration change approval workflows",
                    "Use infrastructure as code with peer review",
                    "Implement automated configuration validation",
                    "Regular configuration audits",
                    "Maintain configuration documentation",
                ],
                escalation_path=[
                    "Configuration Administrator",
                    "Infrastructure Team Lead",
                    "Quality Assurance Team",
                    "Management",
                ],
                time_to_resolve="1-4 hours",
            ),
            # Slowness — LOW: degraded but functional.
            IncidentType.PERFORMANCE_DEGRADATION: IncidentResponse(
                incident_type=IncidentType.PERFORMANCE_DEGRADATION,
                severity=IncidentSeverity.LOW,
                immediate_actions=[
                    "Monitor performance metrics and identify bottlenecks",
                    "Check Cloudflare analytics for traffic patterns",
                    "Verify origin server performance and resource usage",
                    "Review recent configuration changes",
                    "Implement temporary performance optimizations",
                ],
                investigation_steps=[
                    "Analyze performance metrics over time",
                    "Check for DDoS attacks or abnormal traffic patterns",
                    "Review caching configuration and hit rates",
                    "Examine origin server response times",
                    "Identify specific performance bottlenecks",
                ],
                recovery_procedures=[
                    "Optimize caching configuration",
                    "Adjust performance settings (Polish, Mirage, etc.)",
                    "Implement rate limiting if under attack",
                    "Scale origin server resources if needed",
                    "Update CDN configuration for better performance",
                ],
                prevention_measures=[
                    "Implement performance monitoring and alerting",
                    "Regular performance testing and optimization",
                    "Capacity planning and resource forecasting",
                    "Implement automated scaling mechanisms",
                    "Regular performance reviews and optimizations",
                ],
                escalation_path=[
                    "Performance Monitoring Team",
                    "Infrastructure Team",
                    "Application Development Team",
                    "Management",
                ],
                time_to_resolve="2-8 hours",
            ),
        }
||||
|
||||
def get_playbook(self, incident_type: IncidentType) -> Optional[IncidentResponse]:
|
||||
"""Get the playbook for a specific incident type"""
|
||||
return self.playbooks.get(incident_type)
|
||||
|
||||
def list_playbooks(self) -> List[IncidentType]:
|
||||
"""List all available playbooks"""
|
||||
return list(self.playbooks.keys())
|
||||
|
||||
def execute_playbook(
|
||||
self, incident_type: IncidentType, custom_context: Optional[Dict] = None
|
||||
) -> Dict:
|
||||
"""Execute a specific incident response playbook"""
|
||||
playbook = self.get_playbook(incident_type)
|
||||
|
||||
if not playbook:
|
||||
return {"error": f"No playbook found for incident type: {incident_type}"}
|
||||
|
||||
execution_log = {
|
||||
"incident_type": incident_type.value,
|
||||
"severity": playbook.severity.value,
|
||||
"start_time": datetime.now().isoformat(),
|
||||
"steps_completed": [],
|
||||
"custom_context": custom_context or {},
|
||||
}
|
||||
|
||||
# Simulate execution (in real implementation, this would trigger actual actions)
|
||||
execution_log["steps_completed"].extend(
|
||||
[
|
||||
f"Initiated {incident_type.value} response procedure",
|
||||
f"Severity level: {playbook.severity.value}",
|
||||
"Notified escalation path contacts",
|
||||
]
|
||||
)
|
||||
|
||||
execution_log["estimated_resolution_time"] = playbook.time_to_resolve
|
||||
execution_log["completion_status"] = "in_progress"
|
||||
|
||||
return execution_log
|
||||
|
||||
|
||||
def main():
    """Command-line interface for incident response playbooks.

    Actions:
      * ``list``    - print every playbook with severity and ETA
      * ``show``    - print the full procedure for one ``--type``
      * ``execute`` - run the (simulated) response for one ``--type``
    """
    import argparse

    parser = argparse.ArgumentParser(
        description="Cloudflare Incident Response Playbooks"
    )
    parser.add_argument(
        "action", choices=["list", "show", "execute"], help="Action to perform"
    )
    # --type is validated against the enum values; optional for "list".
    parser.add_argument(
        "--type", choices=[t.value for t in IncidentType], help="Incident type"
    )

    args = parser.parse_args()

    playbook_manager = IncidentResponsePlaybook()

    if args.action == "list":
        print("📋 Available Incident Response Playbooks:")
        print("-" * 50)
        for incident_type in playbook_manager.list_playbooks():
            playbook = playbook_manager.get_playbook(incident_type)
            if not playbook:
                # Defensive: list_playbooks() only yields registered types.
                continue

            print(f"🔸 {incident_type.value}")
            print(f" Severity: {playbook.severity.value}")
            print(f" Resolution Time: {playbook.time_to_resolve}")
            print()

    elif args.action == "show":
        # "show" needs a concrete incident type to display.
        if not args.type:
            print("❌ Error: --type argument required")
            return

        try:
            incident_type = IncidentType(args.type)
        except ValueError:
            # Unreachable via argparse choices, but kept for direct calls.
            print(f"❌ Error: Invalid incident type: {args.type}")
            return

        playbook = playbook_manager.get_playbook(incident_type)
        if not playbook:
            print(f"❌ Error: No playbook found for {args.type}")
            return

        print(f"🔍 Incident Response Playbook: {incident_type.value}")
        print("=" * 60)
        print(f"Severity: {playbook.severity.value}")
        print(f"Estimated Resolution: {playbook.time_to_resolve}")

        # Print each phase of the playbook as a numbered list.
        print("\n🚨 Immediate Actions:")
        for i, action in enumerate(playbook.immediate_actions, 1):
            print(f" {i}. {action}")

        print("\n🔍 Investigation Steps:")
        for i, step in enumerate(playbook.investigation_steps, 1):
            print(f" {i}. {step}")

        print("\n🔄 Recovery Procedures:")
        for i, procedure in enumerate(playbook.recovery_procedures, 1):
            print(f" {i}. {procedure}")

        print("\n🛡️ Prevention Measures:")
        for i, measure in enumerate(playbook.prevention_measures, 1):
            print(f" {i}. {measure}")

        print("\n📞 Escalation Path:")
        for i, contact in enumerate(playbook.escalation_path, 1):
            print(f" {i}. {contact}")

    elif args.action == "execute":
        # "execute" also needs a concrete incident type.
        if not args.type:
            print("❌ Error: --type argument required")
            return

        try:
            incident_type = IncidentType(args.type)
        except ValueError:
            print(f"❌ Error: Invalid incident type: {args.type}")
            return

        result = playbook_manager.execute_playbook(incident_type)
        print(f"🚀 Executing {incident_type.value} Incident Response")
        print(f"📊 Result: {result}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this module directly as a CLI tool.
    main()
|
||||
260
scripts/monitoring_dashboard.py
Normal file
260
scripts/monitoring_dashboard.py
Normal file
@@ -0,0 +1,260 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Cloudflare Infrastructure Monitoring Dashboard
|
||||
Provides real-time monitoring of Cloudflare resources and services
|
||||
"""
|
||||
|
||||
import os
|
||||
import json
|
||||
import time
|
||||
import requests
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Any
|
||||
|
||||
|
||||
class CloudflareMonitor:
    """Read-only client for the Cloudflare v4 REST API.

    Credentials come from the CLOUDFLARE_API_TOKEN and
    CLOUDFLARE_ACCOUNT_ID environment variables; construction fails
    fast with ValueError if either is missing.
    """

    def __init__(self):
        self.base_url = "https://api.cloudflare.com/client/v4"
        # Bearer-token auth header reused for every request.
        self.headers = {
            "Authorization": f"Bearer {os.getenv('CLOUDFLARE_API_TOKEN')}",
            "Content-Type": "application/json",
        }
        self.account_id = os.getenv("CLOUDFLARE_ACCOUNT_ID")

        if not self.account_id or not os.getenv("CLOUDFLARE_API_TOKEN"):
            raise ValueError("Missing Cloudflare credentials in environment")

    def make_request(self, endpoint: str) -> Dict[str, Any]:
        """Make API request with error handling.

        Returns the decoded JSON body on success. On any transport or
        HTTP error, returns a Cloudflare-shaped failure dict
        ({"success": False, "errors": [...]}) instead of raising, so
        callers can treat both cases uniformly.
        """
        url = f"{self.base_url}{endpoint}"
        try:
            response = requests.get(url, headers=self.headers, timeout=10)
            response.raise_for_status()
            return response.json()
        except requests.RequestException as e:
            return {"success": False, "errors": [str(e)]}

    def get_account_info(self) -> Dict[str, Any]:
        """Get account information"""
        return self.make_request(f"/accounts/{self.account_id}")

    def get_zones(self) -> List[Dict[str, Any]]:
        """Get all zones (first page only — capped at 50 by per_page)."""
        result = self.make_request(f"/zones?account.id={self.account_id}&per_page=50")
        return result.get("result", []) if result.get("success") else []

    def get_zone_analytics(self, zone_id: str) -> Dict[str, Any]:
        """Get zone analytics for the last hour.

        NOTE(review): analytics/dashboard is a legacy endpoint — confirm
        it is still enabled for this account/plan.
        """
        since = (datetime.now() - timedelta(hours=1)).isoformat()
        return self.make_request(f"/zones/{zone_id}/analytics/dashboard?since={since}")

    def get_waf_rules(self, zone_id: str) -> List[Dict[str, Any]]:
        """Get WAF rules for a zone.

        Flattens the rules of every WAF package into a single list;
        returns [] if the package listing itself fails.
        """
        result = self.make_request(f"/zones/{zone_id}/firewall/waf/packages")
        if result.get("success"):
            packages = result.get("result", [])
            rules = []
            # One extra request per package; failed packages are skipped.
            for package in packages:
                rules_result = self.make_request(
                    f"/zones/{zone_id}/firewall/waf/packages/{package['id']}/rules"
                )
                if rules_result.get("success"):
                    rules.extend(rules_result.get("result", []))
            return rules
        return []

    def get_tunnels(self) -> List[Dict[str, Any]]:
        """Get Cloudflare Tunnels"""
        result = self.make_request(f"/accounts/{self.account_id}/cfd_tunnel")
        return result.get("result", []) if result.get("success") else []

    def get_dns_records(self, zone_id: str) -> List[Dict[str, Any]]:
        """Get DNS records for a zone (first page, up to 100)."""
        result = self.make_request(f"/zones/{zone_id}/dns_records?per_page=100")
        return result.get("result", []) if result.get("success") else []

    def get_health_status(self) -> Dict[str, Any]:
        """Get overall health status.

        Returns {"status": "healthy"|"warning"|"critical", "issues": [...]}.
        "critical" (account access failure) takes precedence over
        "warning" (no zones) because it is checked last.
        """
        status = "healthy"
        issues = []

        # Check zones
        zones = self.get_zones()
        if not zones:
            issues.append("No zones found")
            status = "warning"

        # Check account access
        account_info = self.get_account_info()
        if not account_info.get("success"):
            issues.append("Account access failed")
            status = "critical"

        return {"status": status, "issues": issues}
|
||||
|
||||
|
||||
def format_table(data: List[Dict[str, Any]], headers: List[str]) -> str:
    """Render ``data`` as a plain-text table.

    Each row is a dict; ``headers`` selects (and orders) the columns and
    doubles as the header labels. Missing keys render as empty strings.
    Returns "No data available" for an empty ``data`` list.
    """
    if not data:
        return "No data available"

    # Column width = widest cell in that column, header included.
    widths = []
    for header in headers:
        cell_lengths = [len(str(row.get(header, ""))) for row in data]
        widths.append(max([len(header)] + cell_lengths))

    def render(cells):
        # Left-justify each cell to its column width, joined by " | ".
        return " | ".join(cell.ljust(widths[i]) for i, cell in enumerate(cells))

    header_line = render(headers)
    lines = [header_line, "-" * len(header_line)]
    for row in data:
        lines.append(render([str(row.get(header, "")) for header in headers]))
    return "\n".join(lines)
|
||||
|
||||
|
||||
def main():
    """Render a one-shot monitoring dashboard to stdout.

    Sections: health status, account info, zones overview, DNS records
    and WAF rules for the first zone, tunnels, and a final summary.
    Any exception is caught and reported as a credentials/config hint.
    """
    print("🌐 Cloudflare Infrastructure Monitoring Dashboard")
    print("=" * 60)
    print(f"Timestamp: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    print()

    try:
        # Raises ValueError if CLOUDFLARE_* env vars are missing.
        monitor = CloudflareMonitor()

        # Health check
        print("🔍 Health Status")
        print("-" * 30)
        health = monitor.get_health_status()
        status_emoji = {"healthy": "✅", "warning": "⚠️", "critical": "❌"}
        print(
            f"Status: {status_emoji.get(health['status'], '❓')} {health['status'].upper()}"
        )
        if health["issues"]:
            for issue in health["issues"]:
                print(f" - {issue}")
        print()

        # Account information
        print("🏢 Account Information")
        print("-" * 30)
        account_info = monitor.get_account_info()
        if account_info.get("success"):
            account = account_info["result"]
            print(f"Name: {account.get('name', 'N/A')}")
            print(f"Type: {account.get('type', 'N/A')}")
            print(f"Created: {account.get('created_on', 'N/A')}")
        else:
            print("Failed to retrieve account information")
        print()

        # Zones overview
        print("🌐 Zones Overview")
        print("-" * 30)
        zones = monitor.get_zones()
        zone_data = []
        for zone in zones[:10]:  # Limit to first 10 zones
            zone_data.append(
                {
                    "Name": zone.get("name", "N/A"),
                    "Status": zone.get("status", "N/A"),
                    "Plan": zone.get("plan", {}).get("name", "N/A"),
                    "Development": zone.get("development_mode", "N/A"),
                }
            )

        print(format_table(zone_data, ["Name", "Status", "Plan", "Development"]))
        print(f"Total zones: {len(zones)}")
        print()

        # Pre-initialize so the summary below works even with no zones.
        dns_records = []
        waf_rules = []

        # DNS Records (for first zone)
        if zones:
            first_zone = zones[0]
            print("📋 DNS Records (First Zone)")
            print("-" * 30)
            dns_records = monitor.get_dns_records(first_zone["id"])
            dns_data = []
            for record in dns_records[:15]:  # Limit to first 15 records
                dns_data.append(
                    {
                        "Type": record.get("type", "N/A"),
                        "Name": record.get("name", "N/A"),
                        # Truncate long record contents to keep the table narrow.
                        "Content": record.get("content", "N/A")[:40] + "..."
                        if len(record.get("content", "")) > 40
                        else record.get("content", "N/A"),
                    }
                )

            print(format_table(dns_data, ["Type", "Name", "Content"]))
            print(f"Total DNS records: {len(dns_records)}")
            print()

        # Tunnels
        print("🔗 Cloudflare Tunnels")
        print("-" * 30)
        tunnels = monitor.get_tunnels()
        tunnel_data = []
        for tunnel in tunnels:
            tunnel_data.append(
                {
                    "Name": tunnel.get("name", "N/A"),
                    "Status": tunnel.get("status", "N/A"),
                    "Connections": len(tunnel.get("connections", [])),
                }
            )

        print(format_table(tunnel_data, ["Name", "Status", "Connections"]))
        print(f"Total tunnels: {len(tunnels)}")
        print()

        # WAF Rules (for first zone)
        if zones:
            first_zone = zones[0]
            print("🛡️ WAF Rules (First Zone)")
            print("-" * 30)
            waf_rules = monitor.get_waf_rules(first_zone["id"])
            waf_data = []
            for rule in waf_rules[:10]:  # Limit to first 10 rules
                waf_data.append(
                    {
                        "ID": rule.get("id", "N/A"),
                        # Truncate long descriptions for table layout.
                        "Description": rule.get("description", "N/A")[:50] + "..."
                        if len(rule.get("description", "")) > 50
                        else rule.get("description", "N/A"),
                        "Mode": rule.get("mode", "N/A"),
                    }
                )

            print(format_table(waf_data, ["ID", "Description", "Mode"]))
            print(f"Total WAF rules: {len(waf_rules)}")
            print()

        # Summary
        print("📊 Summary")
        print("-" * 30)
        print(f"Zones: {len(zones)}")
        print(f"Tunnels: {len(tunnels)}")
        if zones:
            print(f"DNS Records (first zone): {len(dns_records)}")
            print(f"WAF Rules (first zone): {len(waf_rules)}")

    except Exception as e:
        # Broad catch: any failure here is reported as a configuration hint
        # rather than a traceback.
        print(f"❌ Error: {e}")
        print("Please ensure your Cloudflare credentials are properly configured.")
|
||||
221
scripts/setup_credentials.py
Normal file
221
scripts/setup_credentials.py
Normal file
@@ -0,0 +1,221 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Cloudflare Credential Setup Wizard
|
||||
Interactive script to guide users through configuring Cloudflare API credentials
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def validate_api_token(token):
    """Return True when *token* plausibly looks like a Cloudflare API token.

    This is a sanity check only (length after trimming), not proof the
    token is actually valid against the Cloudflare API.
    """
    # Cloudflare API tokens are typically 40+ characters long.
    trimmed = token.strip()
    return len(trimmed) >= 40
|
||||
|
||||
|
||||
def validate_account_id(account_id):
    """Return True when *account_id* matches the Cloudflare Account ID shape.

    Account IDs are 32-character hexadecimal strings; case is ignored.
    """
    candidate = account_id.strip()
    return re.fullmatch(r"[a-f0-9]{32}", candidate, re.IGNORECASE) is not None
|
||||
|
||||
|
||||
def validate_zone_id(zone_id):
    """Return True when *zone_id* matches the Cloudflare Zone ID shape.

    Zone IDs share the Account ID format: 32 hex characters, any case.
    """
    candidate = zone_id.strip()
    return re.fullmatch(r"[a-f0-9]{32}", candidate, re.IGNORECASE) is not None
|
||||
|
||||
|
||||
def get_input(prompt, validation_func=None, secret=False):
    """Prompt the user until the answer passes validation.

    Args:
        prompt: text shown to the user.
        validation_func: optional predicate; when given, re-prompt until
            it returns truthy for the entered value.
        secret: when True, read without echoing (via getpass).

    Returns:
        The accepted input string.

    Exits:
        Calls sys.exit(1) on Ctrl-C so the wizard aborts cleanly.
    """
    while True:
        try:
            if secret:
                # Imported lazily: only needed for hidden input.
                import getpass

                answer = getpass.getpass(prompt)
            else:
                answer = input(prompt)

            # No validator means any answer is accepted as-is.
            if validation_func is None or validation_func(answer):
                return answer
            print("❌ Invalid format. Please try again.")
        except KeyboardInterrupt:
            print("\n\nSetup cancelled.")
            sys.exit(1)
|
||||
|
||||
|
||||
def create_env_file(env_vars):
    """Create or merge ./.env, writing the Cloudflare section first.

    Existing key/value pairs are read back from any prior .env and
    re-emitted in labelled sections (GITHUB, GITLAB, OTHER) so that
    non-Cloudflare configuration survives the rewrite.

    Args:
        env_vars: mapping of Cloudflare keys to write (takes precedence
            over any existing values for the same keys).

    Returns:
        Path of the .env file that was written.
    """
    env_path = Path(".env")

    # Harvest whatever assignments already exist so we can preserve them.
    existing_vars = {}
    if env_path.exists():
        with open(env_path, "r") as f:
            for line in f:
                if line.strip() and not line.startswith("#") and "=" in line:
                    key, _, value = line.strip().partition("=")
                    existing_vars[key] = value

    # New values win over anything previously stored.
    existing_vars.update(env_vars)

    divider = (
        "# ============================================================================\n"
    )

    with open(env_path, "w") as f:
        f.write("# OpenCode Environment Variables\n")
        f.write("# Generated by setup_credentials.py\n")
        f.write("# IMPORTANT: Never commit this file to git\n\n")

        # Cloudflare section always comes first.
        f.write(divider)
        f.write("# CLOUDFLARE API CONFIGURATION\n")
        f.write(divider)
        for key, value in env_vars.items():
            f.write(f'{key}="{value}"\n')
        f.write("\n")

        # Group the remaining (preserved) keys by provider prefix.
        sections = {
            "GITHUB": [k for k in existing_vars if k.startswith("GITHUB")],
            "GITLAB": [k for k in existing_vars if k.startswith("GITLAB")],
            "OTHER": [
                k
                for k in existing_vars
                if k not in env_vars and not k.startswith(("GITHUB", "GITLAB"))
            ],
        }

        for section_name, keys in sections.items():
            if not keys:
                continue
            f.write(divider)
            f.write(f"# {section_name} CONFIGURATION\n")
            f.write(divider)
            for key in keys:
                f.write(f'{key}="{existing_vars[key]}"\n')
            f.write("\n")

    return env_path
|
||||
|
||||
|
||||
def main():
    """Interactive wizard: collect Cloudflare credentials and persist them.

    Flow:
      1. Warn (overridable) when not run from a "cloudflare" directory.
      2. Prompt for API token (hidden), Account ID, and optional Zone ID.
      3. Merge the values into ./.env via create_env_file() and chmod 600.
      4. Format-validate the inputs and print next steps.
    """
    print("🚀 Cloudflare Credential Setup Wizard")
    print("=" * 50)
    print()

    print("This wizard will help you configure your Cloudflare API credentials.")
    print("You'll need:")
    print("1. Cloudflare API Token (with appropriate permissions)")
    print("2. Cloudflare Account ID")
    print("3. Optional: Zone ID for specific domain management")
    print()

    # Soft guard: heuristic only — the path just has to mention "cloudflare".
    current_dir = Path.cwd()
    if "cloudflare" not in str(current_dir):
        print("⚠️ Warning: This script should be run from the cloudflare directory")
        print(f"   Current directory: {current_dir}")
        proceed = get_input("Continue anyway? (y/n): ")
        if proceed.lower() != "y":
            print(
                "Please navigate to the cloudflare directory and run this script again."
            )
            return

    # Collect credentials
    print("\n🔐 Cloudflare API Configuration")
    print("-" * 30)

    # API Token — read without echo.
    print("\n📋 Step 1: Cloudflare API Token")
    print("Get your token from: https://dash.cloudflare.com/profile/api-tokens")
    print("Required permissions: Zone:DNS:Edit, Zone:Page Rules:Edit, Account:Read")
    api_token = get_input(
        "API Token: ", validation_func=validate_api_token, secret=True
    )

    # Account ID
    print("\n🏢 Step 2: Cloudflare Account ID")
    print("Find your Account ID in the Cloudflare dashboard sidebar")
    # Example must itself be valid hex, or it contradicts validate_account_id
    # (the previous example contained g-p, which can never validate).
    print("Format: 32-character hex string (e.g., 1a2b3c4d5e6f7a8b9c0d1e2f3a4b5c6d)")
    account_id = get_input("Account ID: ", validation_func=validate_account_id)

    # Zone ID (optional) — empty answer skips it.
    print("\n🌐 Step 3: Zone ID (Optional)")
    print("If you want to manage a specific domain, provide its Zone ID")
    print("Leave blank to skip")
    zone_id = get_input(
        "Zone ID (optional): ",
        validation_func=lambda x: x.strip() == "" or validate_zone_id(x),
    )

    # Prepare environment variables
    env_vars = {"CLOUDFLARE_API_TOKEN": api_token, "CLOUDFLARE_ACCOUNT_ID": account_id}

    if zone_id.strip():
        env_vars["CLOUDFLARE_ZONE_ID"] = zone_id

    # Create (or merge into) the .env file.
    print("\n💾 Saving credentials...")
    env_path = create_env_file(env_vars)

    # Restrict the file to the owner: it contains secrets.
    env_path.chmod(0o600)  # Only user read/write

    print(f"✅ Credentials saved to: {env_path}")
    print("🔒 File permissions set to 600 (owner read/write only)")

    # Test configuration (basic validation only - no external dependencies)
    print("\n🧪 Validating credentials...")

    # Basic format validation — no network call is made here.
    if validate_api_token(api_token) and validate_account_id(account_id):
        print("✅ Credential formats are valid")
        print("⚠️ Note: Full API connectivity test requires 'requests' module")
        print("   Install with: pip install requests")
    else:
        print("❌ Credential validation failed")
        print("   Please check your inputs and try again")

    # Final instructions
    print("\n🎉 Setup Complete!")
    print("=" * 50)
    print("\nNext steps:")
    print("1. Source the environment file:")
    print("   source .env")
    print("\n2. Test Terraform configuration:")
    print("   cd terraform && terraform init && terraform plan")
    print("\n3. Deploy infrastructure:")
    print("   terraform apply")
    print("\n4. Start MCP servers:")
    print("   Check MCP_GUIDE.md for server startup instructions")
    print("\n📚 Documentation:")
    print("- USAGE_GUIDE.md - Complete usage instructions")
    print("- DEPLOYMENT_GUIDE.md - Deployment procedures")
    print("- MCP_GUIDE.md - MCP server management")

    # Security reminder
    print("\n🔐 Security Reminder:")
    print("- Never commit .env to version control")
    print("- Use .gitignore to exclude .env files")
    print("- Consider using environment-specific .env files (.env.production, etc.)")


if __name__ == "__main__":
    main()
|
||||
190
scripts/setup_credentials.sh
Normal file
190
scripts/setup_credentials.sh
Normal file
@@ -0,0 +1,190 @@
|
||||
#!/bin/bash

# Cloudflare Credential Setup Script
# Interactive script to configure Cloudflare API credentials
#
# Prompts for an API token, Account ID, and optional Zone ID, then writes
# them to ./.env with mode 600. Shell counterpart of setup_credentials.py.

set -e

echo "🚀 Cloudflare Credential Setup Wizard"
echo "=================================================="
echo

echo "This script will help you configure your Cloudflare API credentials."
echo "You'll need:"
echo "1. Cloudflare API Token (with appropriate permissions)"
echo "2. Cloudflare Account ID"
echo "3. Optional: Zone ID for specific domain management"
echo

# Check if we're in the right directory.
# Soft guard: only tests that the path mentions "cloudflare"; the user can
# override with a single 'y' keypress (read -n 1).
if [[ ! "$PWD" =~ "cloudflare" ]]; then
    echo "⚠️ Warning: This script should be run from the cloudflare directory"
    echo "   Current directory: $PWD"
    read -p "Continue anyway? (y/n): " -n 1 -r
    echo
    if [[ ! $REPLY =~ ^[Yy]$ ]]; then
        echo "Please navigate to the cloudflare directory and run this script again."
        exit 1
    fi
fi
|
||||
|
||||
# Sanity-check an API token: succeed when it is at least 40 characters.
# (Cloudflare API tokens are typically 40+ characters; this is a length
# check only, not real validation.)
validate_api_token() {
    local candidate="$1"
    [[ "${#candidate}" -ge 40 ]]
}
|
||||
|
||||
# Validate a Cloudflare Account ID: a 32-character hex string.
# Accepts either case to match the Python wizard (setup_credentials.py),
# which validates with re.IGNORECASE — the previous lowercase-only regex
# rejected valid uppercase input the Python path accepted.
validate_account_id() {
    local account_id="$1"
    [[ "$account_id" =~ ^[a-fA-F0-9]{32}$ ]]
}
|
||||
|
||||
# Validate a Cloudflare Zone ID: same shape as an Account ID —
# 32 hex characters, either case (matches the Python wizard's
# re.IGNORECASE behavior; the old regex rejected uppercase hex).
validate_zone_id() {
    local zone_id="$1"
    [[ "$zone_id" =~ ^[a-fA-F0-9]{32}$ ]]
}
|
||||
|
||||
# Prompt until input validates; emit ONLY the accepted value on stdout.
#
# $1 = prompt text
# $2 = name of a validation function (empty string = accept anything)
# $3 = "true" to read without echo (secrets)
#
# Fix: error messages and the newline after secret input previously went
# to stdout, so VAR=$(get_validated_input ...) captured them along with
# the real value. All diagnostics now go to stderr.
get_validated_input() {
    local prompt="$1"
    local validation_func="$2"
    local secret="$3"
    local value

    while true; do
        if [[ "$secret" == "true" ]]; then
            read -r -s -p "$prompt" value
            echo >&2
        else
            read -r -p "$prompt" value
        fi

        if [[ -n "$validation_func" ]]; then
            if $validation_func "$value"; then
                echo "$value"
                return
            else
                echo "❌ Invalid format. Please try again." >&2
            fi
        else
            echo "$value"
            return
        fi
    done
}
|
||||
|
||||
# Collect credentials
echo "🔐 Cloudflare API Configuration"
echo "------------------------------"
echo

# API Token — read without echo.
echo "📋 Step 1: Cloudflare API Token"
echo "Get your token from: https://dash.cloudflare.com/profile/api-tokens"
echo "Required permissions: Zone:DNS:Edit, Zone:Page Rules:Edit, Account:Read"
API_TOKEN=$(get_validated_input "API Token: " validate_api_token true)

# Account ID
echo
echo "🏢 Step 2: Cloudflare Account ID"
echo "Find your Account ID in the Cloudflare dashboard sidebar"
# Example is valid hex on purpose — the old one (…7g8h…) could never pass
# validate_account_id.
echo "Format: 32-character hex string (e.g., 1a2b3c4d5e6f7a8b9c0d1e2f3a4b5c6d)"
ACCOUNT_ID=$(get_validated_input "Account ID: " validate_account_id false)

# Zone ID (optional)
echo
echo "🌐 Step 3: Zone ID (Optional)"
echo "If you want to manage a specific domain, provide its Zone ID"
echo "Leave blank to skip"

# get_validated_input invokes its second argument as a command, so it must
# be a real function name. The previous inline "[[ -z \"\$1\" ]] || ..."
# string could never run: expanding $validation_func cannot invoke the
# [[ shell keyword, so every non-empty answer failed with
# "[[: command not found".
validate_optional_zone_id() {
    [[ -z "$1" ]] || validate_zone_id "$1"
}
ZONE_ID=$(get_validated_input "Zone ID (optional): " validate_optional_zone_id false)
|
||||
|
||||
# Create .env file
echo
echo "💾 Saving credentials..."

# Preserve existing non-Cloudflare variables from any prior .env.
# Fix: the old code appended the two characters '\' 'n' ("$line\n" inside
# double quotes is NOT a newline) and later wrote them with plain echo,
# flattening all preserved lines into one. Use $'\n' for a real newline
# and printf '%s' to emit the content verbatim.
ENV_CONTENT=""
if [[ -f ".env" ]]; then
    while IFS= read -r line; do
        if [[ ! "$line" =~ ^CLOUDFLARE_ ]] && [[ ! "$line" =~ ^#.*CLOUDFLARE ]]; then
            ENV_CONTENT="$ENV_CONTENT$line"$'\n'
        fi
    done < ".env"
fi

# Create new .env content — Cloudflare section first.
cat > .env << EOF
# OpenCode Environment Variables
# Generated by setup_credentials.sh
# IMPORTANT: Never commit this file to git

# ============================================================================
# CLOUDFLARE API CONFIGURATION
# ============================================================================
CLOUDFLARE_API_TOKEN="$API_TOKEN"
CLOUDFLARE_ACCOUNT_ID="$ACCOUNT_ID"
EOF

# Add Zone ID if provided
if [[ -n "$ZONE_ID" ]]; then
    echo "CLOUDFLARE_ZONE_ID=\"$ZONE_ID\"" >> .env
fi

# Append preserved content verbatim.
if [[ -n "$ENV_CONTENT" ]]; then
    echo >> .env
    printf '%s' "$ENV_CONTENT" >> .env
fi
|
||||
|
||||
# Set secure permissions — the file contains secrets; owner-only access.
chmod 600 .env

echo "✅ Credentials saved to: .env"
echo "🔒 File permissions set to 600 (owner read/write only)"

# Basic validation — format checks only; no network call is made here.
echo
echo "🧪 Validating credentials..."
if validate_api_token "$API_TOKEN" && validate_account_id "$ACCOUNT_ID"; then
    echo "✅ Credential formats are valid"
    echo "⚠️ Note: Full API connectivity test requires curl or python requests"
else
    echo "❌ Credential validation failed"
    echo "   Please check your inputs and try again"
fi

# Final instructions
echo
echo "🎉 Setup Complete!"
echo "=================================================="
echo
echo "Next steps:"
echo "1. Source the environment file:"
echo "   source .env"
echo
echo "2. Test Terraform configuration:"
echo "   cd terraform && terraform init && terraform plan"
echo
echo "3. Deploy infrastructure:"
echo "   terraform apply"
echo
echo "4. Start MCP servers:"
echo "   Check MCP_GUIDE.md for server startup instructions"
echo
echo "📚 Documentation:"
echo "- USAGE_GUIDE.md - Complete usage instructions"
echo "- DEPLOYMENT_GUIDE.md - Deployment procedures"
echo "- MCP_GUIDE.md - MCP server management"
echo
echo "🔐 Security Reminder:"
echo "- Never commit .env to version control"
echo "- Use .gitignore to exclude .env files"
echo "- Consider using environment-specific .env files (.env.production, etc.)"

# Make script executable
# NOTE(review): chmod +x on "$0" at the very end of a run is a no-op for
# any invocation that already executed the script, and it fails under
# set -e if the file is not writable. Looks like a candidate for removal —
# confirm nothing depends on it.
chmod +x "$0"
|
||||
309
scripts/terraform_state_manager.py
Normal file
309
scripts/terraform_state_manager.py
Normal file
@@ -0,0 +1,309 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Terraform State Backup and Recovery Manager
|
||||
Automated state management with versioning and rollback capabilities
|
||||
"""
|
||||
|
||||
import os
|
||||
import json
|
||||
import shutil
|
||||
import hashlib
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Optional
|
||||
import argparse
|
||||
|
||||
|
||||
class TerraformStateManager:
    """Manage Terraform state backups and recovery.

    Backups are timestamped copies of terraform.tfstate stored in
    ``backup_dir``; each backup has a sibling ``.json`` metadata file
    carrying its size and SHA-256 hash for later integrity checks.
    Methods report outcomes as human-readable strings rather than
    raising, so callers (the CLI in main()) can print them directly.
    """

    def __init__(
        self, terraform_dir: str = "terraform", backup_dir: str = "terraform_backups"
    ):
        # terraform_dir: where terraform.tfstate lives.
        # backup_dir: created on demand; holds *.tfstate backups + metadata.
        self.terraform_dir = Path(terraform_dir)
        self.backup_dir = Path(backup_dir)
        self.state_file = self.terraform_dir / "terraform.tfstate"
        self.backup_dir.mkdir(exist_ok=True)

    def create_backup(self, description: str = "", auto_backup: bool = True) -> str:
        """Create a backup of the current Terraform state.

        Copies the state file to a timestamped backup and writes a JSON
        metadata file (timestamp, description, size, SHA-256 hash).
        Returns a status message; does not raise when no state exists.
        """
        if not self.state_file.exists():
            return "No state file found to backup"

        # Generate backup filename with timestamp (second resolution —
        # two backups within the same second would share a name).
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        backup_filename = f"state_backup_{timestamp}.tfstate"
        backup_path = self.backup_dir / backup_filename

        # Copy state file (copy2 preserves mtime/permissions).
        shutil.copy2(self.state_file, backup_path)

        # Create metadata file used later by verify_backup_integrity().
        metadata = {
            "timestamp": timestamp,
            "description": description,
            "auto_backup": auto_backup,
            "file_size": os.path.getsize(backup_path),
            "file_hash": self._calculate_file_hash(backup_path),
        }

        metadata_path = backup_path.with_suffix(".json")
        with open(metadata_path, "w") as f:
            json.dump(metadata, f, indent=2)

        return f"Backup created: {backup_filename}"

    def list_backups(self) -> List[Dict]:
        """List all available backups, newest first.

        Each entry has filename/path/size/modified; when the metadata
        sidecar exists its keys are merged in as well.
        """
        backups = []

        for file in self.backup_dir.glob("state_backup_*.tfstate"):
            metadata_file = file.with_suffix(".json")

            backup_info = {
                "filename": file.name,
                "path": str(file),
                "size": file.stat().st_size,
                "modified": datetime.fromtimestamp(file.stat().st_mtime),
            }

            if metadata_file.exists():
                with open(metadata_file, "r") as f:
                    backup_info.update(json.load(f))

            backups.append(backup_info)

        # Sort by modification time (newest first).
        backups.sort(key=lambda x: x["modified"], reverse=True)
        return backups

    def restore_backup(self, backup_filename: str, dry_run: bool = False) -> str:
        """Restore a specific backup over the current state file.

        A pre-restore backup of the current state is taken first (unless
        dry_run), so a bad restore can itself be rolled back.
        """
        backup_path = self.backup_dir / backup_filename

        if not backup_path.exists():
            return f"Backup file not found: {backup_filename}"

        # Create backup of current state before restore.
        if self.state_file.exists() and not dry_run:
            self.create_backup("Pre-restore backup", auto_backup=True)

        if dry_run:
            return f"Dry run: Would restore {backup_filename}"

        # Perform restore.
        shutil.copy2(backup_path, self.state_file)

        return f"State restored from: {backup_filename}"

    def cleanup_old_backups(
        self, keep_days: int = 30, keep_count: int = 10
    ) -> List[str]:
        """Clean up old backups based on age and count.

        Deletes backups older than ``keep_days`` AND trims the set down
        to the newest ``keep_count`` entries. Returns the names of the
        files actually removed (state files and metadata sidecars).
        """
        backups = self.list_backups()

        if not backups:
            return ["No backups found to clean up"]

        cutoff_date = datetime.now() - timedelta(days=keep_days)
        backups_to_delete = []

        # Delete backups older than keep_days.
        for backup in backups:
            if backup["modified"] < cutoff_date:
                backups_to_delete.append(backup)

        # If we have more than keep_count backups, delete the oldest ones
        # (list_backups() is newest-first, so the head is what we keep).
        if len(backups) > keep_count:
            # Keep the newest keep_count backups.
            backups_to_keep = backups[:keep_count]
            backups_to_delete.extend([b for b in backups if b not in backups_to_keep])

        # Remove duplicates (a backup can match both criteria) by filename.
        backups_to_delete = list({b["filename"]: b for b in backups_to_delete}.values())

        deleted_files = []
        for backup in backups_to_delete:
            try:
                # Delete state file.
                state_file = Path(backup["path"])
                if state_file.exists():
                    state_file.unlink()
                    deleted_files.append(state_file.name)

                # Delete metadata file.
                metadata_file = state_file.with_suffix(".json")
                if metadata_file.exists():
                    metadata_file.unlink()
                    deleted_files.append(metadata_file.name)

            except Exception as e:
                # Best-effort: a failed deletion is reported, not fatal.
                print(f"Error deleting {backup['filename']}: {e}")

        return deleted_files

    def verify_backup_integrity(self, backup_filename: str) -> Dict[str, bool]:
        """Verify the integrity of a backup.

        Compares the backup's current size and SHA-256 hash against the
        values recorded in its metadata sidecar. Returns a dict of flags;
        ``integrity`` is True only when both size and hash match.
        """
        backup_path = self.backup_dir / backup_filename
        metadata_path = backup_path.with_suffix(".json")

        if not backup_path.exists():
            return {"exists": False, "metadata_exists": False, "integrity": False}

        if not metadata_path.exists():
            return {"exists": True, "metadata_exists": False, "integrity": False}

        # Check file size and hash against recorded metadata.
        with open(metadata_path, "r") as f:
            metadata = json.load(f)

        current_size = backup_path.stat().st_size
        current_hash = self._calculate_file_hash(backup_path)

        size_matches = current_size == metadata.get("file_size", 0)
        hash_matches = current_hash == metadata.get("file_hash", "")

        return {
            "exists": True,
            "metadata_exists": True,
            "size_matches": size_matches,
            "hash_matches": hash_matches,
            "integrity": size_matches and hash_matches,
        }

    def get_state_statistics(self) -> Dict:
        """Get statistics about current state and backups.

        Includes counts, oldest/newest backup timestamps, total backup
        size, and a list of backups failing the integrity check (note:
        this re-hashes every backup, so it is O(total backup bytes)).
        """
        backups = self.list_backups()

        stats = {
            "current_state_exists": self.state_file.exists(),
            "current_state_size": self.state_file.stat().st_size
            if self.state_file.exists()
            else 0,
            "backup_count": len(backups),
            "oldest_backup": min([b["modified"] for b in backups]) if backups else None,
            "newest_backup": max([b["modified"] for b in backups]) if backups else None,
            "total_backup_size": sum(b["size"] for b in backups),
            "backups_with_issues": [],
        }

        # Check backup integrity for every backup.
        for backup in backups:
            integrity = self.verify_backup_integrity(backup["filename"])
            if not integrity["integrity"]:
                stats["backups_with_issues"].append(
                    {"filename": backup["filename"], "integrity": integrity}
                )

        return stats

    def _calculate_file_hash(self, file_path: Path) -> str:
        """Calculate SHA256 hash of a file (streamed in 4 KiB chunks)."""
        hasher = hashlib.sha256()
        with open(file_path, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hasher.update(chunk)
        return hasher.hexdigest()
|
||||
|
||||
|
||||
def main():
    """Command-line interface for Terraform state management.

    Actions: backup, list, restore, cleanup, stats, verify. ``restore``
    and ``verify`` require --filename; ``cleanup`` honors --keep-days /
    --keep-count. All output is printed; exit status is not used to
    signal per-action errors.
    """
    parser = argparse.ArgumentParser(
        description="Terraform State Backup and Recovery Manager"
    )
    parser.add_argument(
        "action",
        choices=["backup", "list", "restore", "cleanup", "stats", "verify"],
        help="Action to perform",
    )
    parser.add_argument("--filename", help="Backup filename for restore/verify")
    parser.add_argument("--description", help="Description for backup")
    parser.add_argument("--dry-run", action="store_true", help="Dry run mode")
    parser.add_argument(
        "--keep-days", type=int, default=30, help="Days to keep backups"
    )
    parser.add_argument(
        "--keep-count", type=int, default=10, help="Number of backups to keep"
    )
    parser.add_argument(
        "--terraform-dir", default="terraform", help="Terraform directory"
    )
    parser.add_argument(
        "--backup-dir", default="terraform_backups", help="Backup directory"
    )

    args = parser.parse_args()

    manager = TerraformStateManager(args.terraform_dir, args.backup_dir)

    if args.action == "backup":
        result = manager.create_backup(
            args.description or "Manual backup", auto_backup=False
        )
        print(f"✅ {result}")

    elif args.action == "list":
        backups = manager.list_backups()
        print("📋 Available Backups:")
        print("-" * 80)
        for backup in backups:
            print(f"📁 {backup['filename']}")
            print(f"   Size: {backup['size']:,} bytes")
            print(f"   Modified: {backup['modified'].strftime('%Y-%m-%d %H:%M:%S')}")
            if "description" in backup:
                print(f"   Description: {backup['description']}")
            print()

    elif args.action == "restore":
        if not args.filename:
            print("❌ Error: --filename argument required for restore")
            return

        result = manager.restore_backup(args.filename, args.dry_run)
        print(f"🔁 {result}")

    elif args.action == "cleanup":
        deleted = manager.cleanup_old_backups(args.keep_days, args.keep_count)
        if deleted:
            print("🗑️ Cleaned up backups:")
            for filename in deleted:
                # Fix: previously printed the literal placeholder
                # "(unknown)" instead of the deleted file's name.
                print(f"  - {filename}")
        else:
            print("✅ No backups needed cleanup")

    elif args.action == "stats":
        stats = manager.get_state_statistics()
        print("📊 Terraform State Statistics")
        print("-" * 40)
        print(
            f"Current state exists: {'✅' if stats['current_state_exists'] else '❌'}"
        )
        print(f"Current state size: {stats['current_state_size']:,} bytes")
        print(f"Backup count: {stats['backup_count']}")
        if stats["oldest_backup"]:
            print(f"Oldest backup: {stats['oldest_backup'].strftime('%Y-%m-%d')}")
            print(f"Newest backup: {stats['newest_backup'].strftime('%Y-%m-%d')}")
        print(f"Total backup size: {stats['total_backup_size']:,} bytes")

        if stats["backups_with_issues"]:
            print(f"\n⚠️ Backups with issues: {len(stats['backups_with_issues'])}")
            for issue in stats["backups_with_issues"]:
                print(f"  - {issue['filename']}")

    elif args.action == "verify":
        if not args.filename:
            print("❌ Error: --filename argument required for verify")
            return

        integrity = manager.verify_backup_integrity(args.filename)
        print(f"🔍 Integrity check for {args.filename}")
        print(f"   File exists: {'✅' if integrity['exists'] else '❌'}")
        print(f"   Metadata exists: {'✅' if integrity['metadata_exists'] else '❌'}")
        if integrity["metadata_exists"]:
            print(f"   Size matches: {'✅' if integrity['size_matches'] else '❌'}")
            print(f"   Hash matches: {'✅' if integrity['hash_matches'] else '❌'}")
        print(f"   Overall integrity: {'✅' if integrity['integrity'] else '❌'}")


if __name__ == "__main__":
    main()
|
||||
393
scripts/waf-and-plan-invariants.sh
Normal file
393
scripts/waf-and-plan-invariants.sh
Normal file
@@ -0,0 +1,393 @@
|
||||
#!/usr/bin/env bash
|
||||
# ============================================================================
|
||||
# WAF + PLAN INVARIANTS CHECKER
|
||||
# ============================================================================
|
||||
# Enforces security+plan gating invariants for VaultMesh Cloudflare IaC.
|
||||
# Run from repo root: bash scripts/waf-and-plan-invariants.sh
|
||||
#
|
||||
# Exit codes:
|
||||
# 0 = All invariants pass
|
||||
# 1 = One or more invariants violated
|
||||
#
|
||||
# Governed by: RED-BOOK.md
|
||||
# ============================================================================
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
|
||||
cd "$REPO_ROOT"
|
||||
|
||||
# Colors
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
NC='\033[0m'
|
||||
|
||||
echo "============================================"
|
||||
echo " VaultMesh WAF + Plan Invariants Check"
|
||||
echo "============================================"
|
||||
echo ""
|
||||
|
||||
FAILED=0
|
||||
|
||||
echo "── 0. Toolchain Versions ──"
|
||||
terraform version || true
|
||||
python3 --version || true
|
||||
python3 -m pip --version || true
|
||||
python3 -m pytest --version || true
|
||||
python3 -m mcp.waf_intelligence --version || true
|
||||
|
||||
echo ""
|
||||
|
||||
echo "── 1. WAF Intel Analyzer Regression ──"
|
||||
if python3 -m pytest -q tests/test_waf_intelligence_analyzer.py; then
|
||||
echo -e "${GREEN}✓${NC} 1.1 Analyzer regression test passed"
|
||||
else
|
||||
echo -e "${RED}✗${NC} 1.1 Analyzer regression test failed"
|
||||
FAILED=1
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "── 2. WAF Intel CLI Contract ──"
|
||||
|
||||
TMP_DIR="${TMPDIR:-/tmp}"
|
||||
WAF_JSON_FILE="$(mktemp -p "$TMP_DIR" waf-intel.XXXXXX.json)"
|
||||
if python3 -m mcp.waf_intelligence --file terraform/waf.tf --format json --limit 5 >"$WAF_JSON_FILE"; then
|
||||
if python3 - "$WAF_JSON_FILE" <<'PY'
|
||||
import json
|
||||
import sys
|
||||
|
||||
path = sys.argv[1]
|
||||
with open(path, "r", encoding="utf-8") as f:
|
||||
payload = json.load(f)
|
||||
|
||||
insights = payload.get("insights")
|
||||
if not isinstance(insights, list):
|
||||
raise SystemExit("waf_intel: insights is not a list")
|
||||
|
||||
if insights:
|
||||
raise SystemExit(f"waf_intel: expected 0 insights, got {len(insights)}")
|
||||
|
||||
print("ok")
|
||||
PY
|
||||
then
|
||||
echo -e "${GREEN}✓${NC} 2.1 WAF Intel JSON parses and insights are empty"
|
||||
else
|
||||
echo -e "${RED}✗${NC} 2.1 WAF Intel JSON contract violated"
|
||||
cat "$WAF_JSON_FILE"
|
||||
FAILED=1
|
||||
fi
|
||||
else
|
||||
echo -e "${RED}✗${NC} 2.1 WAF Intel CLI failed"
|
||||
FAILED=1
|
||||
fi
|
||||
rm -f "$WAF_JSON_FILE"
|
||||
|
||||
echo ""
|
||||
echo "── 3. Terraform Format + Validate + Plan Gates ──"
|
||||
|
||||
cd terraform
|
||||
|
||||
if terraform fmt -check -recursive >/dev/null 2>&1; then
|
||||
echo -e "${GREEN}✓${NC} 3.1 Terraform formatting OK"
|
||||
else
|
||||
echo -e "${RED}✗${NC} 3.1 Terraform formatting required"
|
||||
echo " Run: cd terraform && terraform fmt -recursive"
|
||||
FAILED=1
|
||||
fi
|
||||
|
||||
terraform init -backend=false -input=false >/dev/null 2>&1
|
||||
if terraform validate -no-color >/dev/null 2>&1; then
|
||||
echo -e "${GREEN}✓${NC} 3.2 Terraform validate OK"
|
||||
else
|
||||
echo -e "${RED}✗${NC} 3.2 Terraform validate failed"
|
||||
terraform validate -no-color
|
||||
FAILED=1
|
||||
fi
|
||||
|
||||
PLAN_FREE_OUT="$(mktemp -p "$TMP_DIR" tf-plan-free.XXXXXX.out)"
|
||||
PLAN_PRO_OUT="$(mktemp -p "$TMP_DIR" tf-plan-pro.XXXXXX.out)"
|
||||
PLAN_FREE_JSON="$(mktemp -p "$TMP_DIR" tf-plan-free.XXXXXX.json)"
|
||||
PLAN_PRO_JSON="$(mktemp -p "$TMP_DIR" tf-plan-pro.XXXXXX.json)"
|
||||
rm -f "$PLAN_FREE_OUT" "$PLAN_PRO_OUT"
|
||||
|
||||
if terraform plan -no-color -input=false -lock=false -refresh=false -out="$PLAN_FREE_OUT" -var-file=assurance_free.tfvars >/dev/null; then
|
||||
if terraform show -json "$PLAN_FREE_OUT" >"$PLAN_FREE_JSON"; then
|
||||
if output="$(
|
||||
python3 - "$PLAN_FREE_JSON" <<'PY'
|
||||
import json
|
||||
import sys
|
||||
|
||||
path = sys.argv[1]
|
||||
try:
|
||||
with open(path, "r", encoding="utf-8") as f:
|
||||
payload = json.load(f)
|
||||
except json.JSONDecodeError as e:
|
||||
print(f"json parse error: {e}")
|
||||
raise SystemExit(2)
|
||||
|
||||
resource_changes = payload.get("resource_changes")
|
||||
planned_values = payload.get("planned_values")
|
||||
|
||||
if not isinstance(resource_changes, list) or not isinstance(planned_values, dict):
|
||||
print("invalid plan json: missing resource_changes[] and/or planned_values{}")
|
||||
raise SystemExit(2)
|
||||
|
||||
addresses = [
|
||||
rc.get("address", "")
|
||||
for rc in resource_changes
|
||||
if isinstance(rc, dict) and isinstance(rc.get("address"), str)
|
||||
]
|
||||
|
||||
managed_waf = sum(1 for a in addresses if a.startswith("cloudflare_ruleset.managed_waf["))
|
||||
bot_mgmt = sum(1 for a in addresses if a.startswith("cloudflare_bot_management.domains["))
|
||||
|
||||
if managed_waf != 0 or bot_mgmt != 0:
|
||||
print(f"expected managed_waf=0 bot_management=0, got managed_waf={managed_waf} bot_management={bot_mgmt}")
|
||||
for addr in sorted(
|
||||
a
|
||||
for a in addresses
|
||||
if a.startswith("cloudflare_ruleset.managed_waf[") or a.startswith("cloudflare_bot_management.domains[")
|
||||
):
|
||||
print(f"- {addr}")
|
||||
raise SystemExit(2)
|
||||
PY
|
||||
)"; then
|
||||
echo -e "${GREEN}✓${NC} 3.3 Free-plan gate OK (managed_waf=0 bot_management=0)"
|
||||
else
|
||||
echo -e "${RED}✗${NC} 3.3 Free-plan gate violated"
|
||||
if [[ -n "${output:-}" ]]; then
|
||||
echo "$output" | sed 's/^/ /'
|
||||
fi
|
||||
FAILED=1
|
||||
fi
|
||||
else
|
||||
echo -e "${RED}✗${NC} 3.3 terraform show -json failed (free)"
|
||||
FAILED=1
|
||||
fi
|
||||
else
|
||||
echo -e "${RED}✗${NC} 3.3 Terraform plan failed (free)"
|
||||
terraform show -no-color "$PLAN_FREE_OUT" 2>/dev/null || true
|
||||
FAILED=1
|
||||
fi
|
||||
|
||||
# ── 3.4 Paid-plan gate ──────────────────────────────────────────────────
# Plan with the paid-plan fixture and assert that exactly one managed-WAF
# ruleset and exactly one bot-management resource are planned.
# Relies on PLAN_PRO_OUT / PLAN_PRO_JSON temp files and the FAILED /
# colour globals defined earlier in this script.
if terraform plan -no-color -input=false -lock=false -refresh=false -out="$PLAN_PRO_OUT" -var-file=assurance_pro.tfvars >/dev/null; then
  if terraform show -json "$PLAN_PRO_OUT" >"$PLAN_PRO_JSON"; then
    # The heredoc delimiter is quoted ('PY'), so the Python source is passed
    # through without shell expansion. The validator exits 0 on success and
    # 2 with a diagnostic on stdout on any violation; the surrounding
    # if/else turns that into the pass/fail report.
    if output="$(
      python3 - "$PLAN_PRO_JSON" <<'PY'
import json
import sys

path = sys.argv[1]
try:
    with open(path, "r", encoding="utf-8") as f:
        payload = json.load(f)
except json.JSONDecodeError as e:
    print(f"json parse error: {e}")
    raise SystemExit(2)

# Terraform's JSON plan format omits "resource_changes" entirely when the
# plan contains no changes, so treat a missing key as an empty list rather
# than rejecting the document as malformed.
resource_changes = payload.get("resource_changes") or []
planned_values = payload.get("planned_values")

if not isinstance(resource_changes, list) or not isinstance(planned_values, dict):
    print("invalid plan json: missing resource_changes[] and/or planned_values{}")
    raise SystemExit(2)

# Addresses of every planned resource change (defensive about shape).
addresses = [
    rc.get("address", "")
    for rc in resource_changes
    if isinstance(rc, dict) and isinstance(rc.get("address"), str)
]

managed_waf = sum(1 for a in addresses if a.startswith("cloudflare_ruleset.managed_waf["))
bot_mgmt = sum(1 for a in addresses if a.startswith("cloudflare_bot_management.domains["))

if managed_waf != 1 or bot_mgmt != 1:
    print("expected managed_waf=1 bot_management=1")
    print(f"got managed_waf={managed_waf} bot_management={bot_mgmt}")
    print("observed:")
    for addr in sorted(
        a
        for a in addresses
        if a.startswith("cloudflare_ruleset.managed_waf[") or a.startswith("cloudflare_bot_management.domains[")
    ):
        print(f"- {addr}")
    raise SystemExit(2)
PY
    )"; then
      echo -e "${GREEN}✓${NC} 3.4 Paid-plan gate OK (managed_waf=1 bot_management=1)"
    else
      echo -e "${RED}✗${NC} 3.4 Paid-plan gate violated"
      # Indent the validator's diagnostic under the ✗ line.
      if [[ -n "${output:-}" ]]; then
        echo "$output" | sed 's/^/  /'
      fi
      FAILED=1
    fi
  else
    echo -e "${RED}✗${NC} 3.4 terraform show -json failed (pro)"
    FAILED=1
  fi
else
  echo -e "${RED}✗${NC} 3.4 Terraform plan failed (pro)"
  # Best effort: dump whatever plan output exists to aid debugging.
  terraform show -no-color "$PLAN_PRO_OUT" 2>/dev/null || true
  FAILED=1
fi
|
||||
|
||||
# Scratch files for the negative-control plans of section 4.
# $TMP_DIR is created earlier in this script (not visible here).
PLAN_NEG_FREE_OUT="$(mktemp -p "$TMP_DIR" tf-plan-neg-free.XXXXXX.out)"
PLAN_NEG_FREE_JSON="$(mktemp -p "$TMP_DIR" tf-plan-neg-free.XXXXXX.json)"
PLAN_NEG_PRO_OUT="$(mktemp -p "$TMP_DIR" tf-plan-neg-pro.XXXXXX.out)"
PLAN_NEG_PRO_JSON="$(mktemp -p "$TMP_DIR" tf-plan-neg-pro.XXXXXX.json)"
# Drop the placeholder .out files; terraform plan -out recreates them.
rm -f -- "$PLAN_NEG_FREE_OUT" "$PLAN_NEG_PRO_OUT"

echo ""
echo "── 4. Negative Controls (Prove the gate bites) ──"
|
||||
|
||||
# ── 4.1 Negative control (free plan) ────────────────────────────────────
# Plan a fixture that SHOULD violate the free-plan gate, re-run the same
# gate check, and require that it fails with the expected message. This
# proves the gate actually bites instead of passing vacuously.
if terraform plan -no-color -input=false -lock=false -refresh=false -out="$PLAN_NEG_FREE_OUT" -var-file=assurance_negative_free_should_fail.tfvars >/dev/null; then
  if terraform show -json "$PLAN_NEG_FREE_OUT" >"$PLAN_NEG_FREE_JSON"; then
    if output="$(
      python3 - "$PLAN_NEG_FREE_JSON" <<'PY'
# Free-plan gate: the plan must create no managed-WAF rulesets and no
# bot-management resources. Exits 2 with a diagnostic on stdout when the
# invariant is violated — which is the EXPECTED outcome for this control.
import json
import sys

path = sys.argv[1]
try:
    with open(path, "r", encoding="utf-8") as f:
        payload = json.load(f)
except json.JSONDecodeError as e:
    print(f"json parse error: {e}")
    raise SystemExit(2)

# NOTE(review): Terraform omits "resource_changes" when a plan has no
# changes; that case would be reported as "invalid plan json" here —
# confirm the fixtures always produce at least one change.
resource_changes = payload.get("resource_changes")
planned_values = payload.get("planned_values")

if not isinstance(resource_changes, list) or not isinstance(planned_values, dict):
    print("invalid plan json: missing resource_changes[] and/or planned_values{}")
    raise SystemExit(2)

# Addresses of every planned resource change (defensive about shape).
addresses = [
    rc.get("address", "")
    for rc in resource_changes
    if isinstance(rc, dict) and isinstance(rc.get("address"), str)
]

managed_waf = sum(1 for a in addresses if a.startswith("cloudflare_ruleset.managed_waf["))
bot_mgmt = sum(1 for a in addresses if a.startswith("cloudflare_bot_management.domains["))

if managed_waf != 0 or bot_mgmt != 0:
    # The shell below pattern-matches this exact message to tell an
    # expected gate failure apart from an unrelated error.
    print(f"expected managed_waf=0 bot_management=0, got managed_waf={managed_waf} bot_management={bot_mgmt}")
    for addr in sorted(
        a
        for a in addresses
        if a.startswith("cloudflare_ruleset.managed_waf[") or a.startswith("cloudflare_bot_management.domains[")
    ):
        print(f"- {addr}")
    raise SystemExit(2)

print("ok")
PY
    )"; then
      # Gate passed on a fixture built to violate it: the gate is inert.
      echo -e "${RED}✗${NC} 4.1 Negative free-plan control unexpectedly passed"
      FAILED=1
    else
      # Gate failed — check it failed for the right reason.
      if [[ "${output:-}" == *"expected managed_waf=0 bot_management=0"* ]]; then
        echo -e "${GREEN}✓${NC} 4.1 Negative free-plan control failed as expected"
      else
        echo -e "${RED}✗${NC} 4.1 Negative free-plan control failed (unexpected error)"
        if [[ -n "${output:-}" ]]; then
          echo "$output" | sed 's/^/  /'
        fi
        FAILED=1
      fi
    fi
  else
    echo -e "${RED}✗${NC} 4.1 terraform show -json failed (negative free)"
    FAILED=1
  fi
else
  echo -e "${RED}✗${NC} 4.1 Terraform plan failed (negative free)"
  FAILED=1
fi
|
||||
|
||||
# ── 4.2 Negative control (paid plan) ────────────────────────────────────
# Mirror of 4.1 for the paid-plan gate: plan a fixture that SHOULD violate
# the managed_waf=1 / bot_management=1 invariant and require the gate to
# fail with the expected message.
if terraform plan -no-color -input=false -lock=false -refresh=false -out="$PLAN_NEG_PRO_OUT" -var-file=assurance_negative_pro_should_fail.tfvars >/dev/null; then
  if terraform show -json "$PLAN_NEG_PRO_OUT" >"$PLAN_NEG_PRO_JSON"; then
    if output="$(
      python3 - "$PLAN_NEG_PRO_JSON" <<'PY'
# Paid-plan gate: the plan must create exactly one managed-WAF ruleset and
# exactly one bot-management resource. Exits 2 with a diagnostic on stdout
# when violated — the EXPECTED outcome for this control.
import json
import sys

path = sys.argv[1]
try:
    with open(path, "r", encoding="utf-8") as f:
        payload = json.load(f)
except json.JSONDecodeError as e:
    print(f"json parse error: {e}")
    raise SystemExit(2)

# NOTE(review): Terraform omits "resource_changes" when a plan has no
# changes; that case would be reported as "invalid plan json" here —
# confirm the fixtures always produce at least one change.
resource_changes = payload.get("resource_changes")
planned_values = payload.get("planned_values")

if not isinstance(resource_changes, list) or not isinstance(planned_values, dict):
    print("invalid plan json: missing resource_changes[] and/or planned_values{}")
    raise SystemExit(2)

# Addresses of every planned resource change (defensive about shape).
addresses = [
    rc.get("address", "")
    for rc in resource_changes
    if isinstance(rc, dict) and isinstance(rc.get("address"), str)
]

managed_waf = sum(1 for a in addresses if a.startswith("cloudflare_ruleset.managed_waf["))
bot_mgmt = sum(1 for a in addresses if a.startswith("cloudflare_bot_management.domains["))

if managed_waf != 1 or bot_mgmt != 1:
    # The shell below pattern-matches the first line to tell an expected
    # gate failure apart from an unrelated error.
    print("expected managed_waf=1 bot_management=1")
    print(f"got managed_waf={managed_waf} bot_management={bot_mgmt}")
    print("observed:")
    for addr in sorted(
        a
        for a in addresses
        if a.startswith("cloudflare_ruleset.managed_waf[") or a.startswith("cloudflare_bot_management.domains[")
    ):
        print(f"- {addr}")
    raise SystemExit(2)

print("ok")
PY
    )"; then
      # Gate passed on a fixture built to violate it: the gate is inert.
      echo -e "${RED}✗${NC} 4.2 Negative paid-plan control unexpectedly passed"
      FAILED=1
    else
      # Gate failed — check it failed for the right reason.
      if [[ "${output:-}" == *"expected managed_waf=1 bot_management=1"* ]]; then
        echo -e "${GREEN}✓${NC} 4.2 Negative paid-plan control failed as expected"
      else
        echo -e "${RED}✗${NC} 4.2 Negative paid-plan control failed (unexpected error)"
        if [[ -n "${output:-}" ]]; then
          echo "$output" | sed 's/^/  /'
        fi
        FAILED=1
      fi
    fi
  else
    echo -e "${RED}✗${NC} 4.2 terraform show -json failed (negative pro)"
    FAILED=1
  fi
else
  echo -e "${RED}✗${NC} 4.2 Terraform plan failed (negative pro)"
  FAILED=1
fi
|
||||
|
||||
# Remove every plan artifact produced by sections 3–4.
rm -f -- \
  "$PLAN_FREE_OUT" "$PLAN_FREE_JSON" \
  "$PLAN_PRO_OUT" "$PLAN_PRO_JSON" \
  "$PLAN_NEG_FREE_OUT" "$PLAN_NEG_FREE_JSON" \
  "$PLAN_NEG_PRO_OUT" "$PLAN_NEG_PRO_JSON"

# Return to the repository root before printing the final verdict.
cd "$REPO_ROOT"

echo ""
echo "============================================"
echo " Summary"
echo "============================================"

# FAILED is incremented/set by every check above; non-zero means at least
# one invariant was violated.
if (( FAILED > 0 )); then
  echo -e "${RED}WAF + plan invariants violated. Fix before merging.${NC}"
  exit 1
fi

echo -e "${GREEN}All WAF + plan invariants pass. ✓${NC}"
exit 0
|
||||
Reference in New Issue
Block a user