diff --git a/.claude/settings.json b/.claude/settings.json new file mode 100644 index 000000000..8aa224b05 --- /dev/null +++ b/.claude/settings.json @@ -0,0 +1,12 @@ +{ + "permissions": { + "allow": [ + "Bash(docker-compose -f docker-compose-blue.yml up --build)", + "Bash(docker-compose -f docker-compose-blue.yml build --no-cache blue-backend)", + "Bash(docker-compose -f docker-compose-blue.yml up -d blue-backend)", + "Bash(docker exec *)", + "Bash(docker restart *)", + "Bash(curl -s -o /dev/null -w \"pending:%{http_code}\" http://localhost:8002/remediate/pending)" + ] + } +} diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000..101a0a86a --- /dev/null +++ b/.dockerignore @@ -0,0 +1,34 @@ +# Python +venv/ +.venv/ +env/ +__pycache__/ +*.pyc +*.pyo +*.pyd +*.egg-info/ +.pytest_cache/ +.mypy_cache/ + +# Node / frontend +node_modules/ +*/frontend/dist/ +*/frontend/node_modules/ +.vite/ + +# Secrets & environment +.env +.env.* +!.env.example + +# VCS / editors +.git/ +.gitignore +.idea/ +.vscode/ +.DS_Store + +# OS / build artifacts +*.log +build/ +dist/ diff --git a/.env.example b/.env.example new file mode 100644 index 000000000..ce0da4731 --- /dev/null +++ b/.env.example @@ -0,0 +1,27 @@ +# Copy this file to .env and fill in real values. 
+ +# ── Azure OpenAI (GPT-4o) — powers all Red Agent LLM calls ── +AZURE_OPENAI_ENDPOINT=https://abineshbalasubramaniyam-resource.cognitiveservices.azure.com/ +AZURE_OPENAI_API_KEY=your_azure_api_key_here +AZURE_OPENAI_API_VERSION=2024-12-01-preview +AZURE_OPENAI_MODEL=gpt-4o + +# CVE feed (NVD is used by default, no key required) +CVE_FEED_URL= +CVE_API_KEY= + +# Agent ports +RED_AGENT_PORT=8001 +BLUE_AGENT_PORT=8002 +AUTH_SERVICE_PORT=8003 + +# Red Arsenal MCP server (SSE transport) +RED_ARSENAL_HOST=0.0.0.0 +RED_ARSENAL_PORT=8765 + +# Recon agent tuning +MAX_RECON_ITERATIONS=6 +RECON_TOOL_TIMEOUT=120 + +# Logging +LOG_LEVEL=INFO diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..404256133 --- /dev/null +++ b/.gitignore @@ -0,0 +1,19 @@ +# Python +__pycache__/ +*.pyc +venv/ + +# Frontend (Vite + React) +node_modules/ +dist/ +*.tsbuildinfo + +# Env & secrets +.env +!.env.example + +# Logs +*.log + +# macOS +.DS_Store diff --git a/README.md b/README.md index c5c886b3e..c753f4f29 100644 --- a/README.md +++ b/README.md @@ -1,86 +1,289 @@ -# HackToFuture 4.0 — Template +# HackToFuture 4.0 — Team CO3 -Welcome to your official HackToFuture 4 repository. - -This repository template will be used for development, tracking progress, and final submission of your project. Ensure that all work is committed here within the allowed hackathon duration. +> **HTF 4.0 ARENA — Red vs Blue AI Cyber Battleground** +> An autonomous AI-powered cybersecurity simulation where a Red agent attacks and a Blue agent defends in real time. --- -### Instructions for the teams: +## Problem Statement + +Modern cybersecurity teams struggle to train for real-world attack-defense scenarios. Traditional red team / blue team exercises are expensive, slow, and require large specialist teams. There is no accessible, automated platform where both sides operate simultaneously and the defense team can see threats as they emerge and respond in real time. 
-- Fork the Repository and name the forked repo in this convention: hacktofuture4-team_id (for eg: hacktofuture4-A01) +**Who is affected:** Security teams, SOC analysts, cybersecurity trainees, and organizations running internal audits. --- -## Rules +## Proposed Solution -- Work must be done ONLY in the forked repository -- Only Four Contributors are allowed. -- After 36 hours, Please make PR to the Main Repository. A Form will be sent to fill the required information. -- Do not copy code from other teams -- All commits must be from individual GitHub accounts -- Please provide meaningful commits for tracking. -- Do not share your repository with other teams -- Final submission must be pushed before the deadline -- Any violation may lead to disqualification +**HTF Arena** is a live Red vs Blue cybersecurity simulation platform powered by autonomous AI agents: + +- The **Red Agent** (attacker) runs a 3-agent CrewAI crew — an Infrastructure Auditor, a Risk Analyst, and a Technical Verification Engineer — that scans a target, assesses risk, and produces a detailed security report. +- The **Blue Agent** (defender) receives the Red report in real time, runs it through an IDS and SIEM engine, queues fixes for operator approval, and applies remediations with a single click. +- Both sides stream live tool calls, logs, and status updates to a shared **Arena Dashboard** that the operator watches in real time. 
+ +What makes it unique: +- Fully autonomous AI agents using CrewAI + Azure OpenAI GPT-4o +- Real-time event bus connecting Red findings directly to Blue remediation +- Human-in-the-loop approval workflow — the operator approves or rejects each fix +- Built-in IDS (15 signature rules) and SIEM (7 MITRE ATT&CK phase correlation) +- Live score tracking for both teams --- -# The Final README Template +## Features + +### Red Agent (Attacker) +- 3-agent CrewAI crew: Infrastructure Auditor → Risk Analyst → Technical Verification Engineer +- 9 assessment tools: `nmap_scan`, `httpx_probe`, `gobuster_scan`, `nuclei_scan`, `katana_crawl`, `dirsearch_scan`, `nuclei_exploit`, `ffuf_fuzz`, `nmap_vuln_scan` +- Simulated tool results when Kali MCP server is unavailable (Windows-safe) +- Streams every tool card to the dashboard in real time via WebSocket +- Chat interface — type a target and the crew launches automatically -## Problem Statement / Idea +### Blue Agent (Defender) +- **Remediation Engine** — parses Red reports, maps 13 finding categories to fix actions, queues all fixes for operator approval +- **IDS Engine** — 15 signature rules (SIG-001 to SIG-015), fires an alert card per Red finding, maps each to a MITRE ATT&CK technique +- **SIEM Engine** — correlates events across 7 attack phases (Reconnaissance, Exploitation, Exfiltration, Persistence, Impact, Defense Evasion, Privilege Escalation), produces a risk score (0–10) +- **Approval Workflow** — operator clicks ✓ APPLY per fix or ✓ APPROVE ALL to apply everything at once +- SSH Scanner — connects to a live host, discovers services, performs CVE lookup, and applies fixes +- Defense endpoints: close port, harden service, isolate host, apply patch, verify fix -Clearly describe the problem you are solving. 
+### Arena Dashboard +- Split-screen Red vs Blue battleground +- Real-time tool call cards with RUNNING → DONE / FAILED status +- Live log streams for both agents +- Draggable divider between Red and Blue panels +- Live score ticker (Red score vs Blue score) +- Scoreboard tab with full history +- Download full battle report as `.txt` -- What is the problem? -- Why is it important? -- Who are the target users? +### Auth Service +- User registration and login with JWT tokens +- TOTP-based MFA (QR code setup) +- Per-team score tracking via REST API --- -## Proposed Solution +## Architecture -Explain your approach: +``` +┌─────────────────────────────────────────────────────────┐ +│ Arena Dashboard (React) │ +│ WebSocket ◄──────────────────► WebSocket │ +│ ws://localhost:8001/ws/red │ +│ ws://localhost:8002/ws/blue │ +└──────────────┬──────────────────────────┬───────────────┘ + │ │ + ┌───────▼────────┐ ┌─────────▼──────────┐ + │ Red Agent API │ │ Blue Agent API │ + │ :8001 │ │ :8002 │ + │ │ │ │ + │ CrewAI Crew │ │ RemediationEngine │ + │ ├ Auditor │ │ IDSEngine │ + │ ├ Analyst │ ───► │ SIEMEngine │ + │ └ Verifier │ │ SSHScanner │ + │ │ │ DefensePlanner │ + └────────────────┘ └─────────────────────┘ + │ │ + └──────────┬───────────────┘ + │ + ┌───────▼────────┐ + │ Event Bus │ + │ (pub/sub) │ + │ core/ │ + └───────┬────────┘ + │ + ┌───────▼────────┐ + │ Auth Service │ + │ :8003 │ + └────────────────┘ +``` -- What are you building? -- How does it solve the problem? -- What makes your solution unique? 
+--- + +## Tech Stack + +| Layer | Technology | +|-------|-----------| +| **Frontend** | React 18, TypeScript, Vite, Axios, native WebSocket | +| **Backend** | FastAPI, Uvicorn, Pydantic v2 | +| **AI Agents** | CrewAI (multi-agent framework), Azure OpenAI GPT-4o | +| **Auth** | JWT, TOTP (pyotp), bcrypt | +| **Real-time** | WebSocket (FastAPI), asyncio event bus (pub/sub) | +| **Security Tools** | nmap, nuclei, gobuster, httpx, ffuf, katana, dirsearch (via MCP / simulated) | +| **IDS / SIEM** | Custom Python engines, 15 IDS signatures, MITRE ATT&CK phase mapping | +| **SSH Scanning** | Paramiko | +| **Environment** | Python 3.12, Node.js 18+, Windows / Linux | --- -## Features +## API Endpoints + +### Red Agent (:8001) +| Method | Path | Description | +|--------|------|-------------| +| POST | `/chat` | Chat with Red agent, launch assessment | +| POST | `/scan/recon` | Start recon session | +| POST | `/exploit/auto` | Start exploit/verify session | +| GET | `/report/download/{id}` | Download assessment report | +| WS | `/ws/red` | Live tool calls and logs | -List the core features of your project: +### Blue Agent (:8002) +| Method | Path | Description | +|--------|------|-------------| +| POST | `/remediate/run-sample` | Run full Red→Blue pipeline with sample report | +| POST | `/remediate/ingest-report` | Ingest a custom Red report | +| GET | `/remediate/pending` | List fixes awaiting approval | +| POST | `/remediate/approve/{id}` | Approve and apply a single fix | +| POST | `/remediate/approve-all` | Approve and apply all pending fixes | +| POST | `/remediate/reject/{id}` | Reject a pending fix | +| GET | `/ids/status` | IDS engine status and alert summary | +| GET | `/ids/alerts` | Recent IDS alert list | +| GET | `/siem/report` | Correlated SIEM report with timeline | +| GET | `/siem/status` | SIEM engine status | +| POST | `/scan/ssh` | Run SSH scan on a live host | +| WS | `/ws/blue` | Live tool calls and logs | -- Feature 1 -- Feature 2 -- Feature 3 +### 
Auth Service (:8003) +| Method | Path | Description | +|--------|------|-------------| +| POST | `/auth/register` | Register a new user | +| POST | `/auth/login` | Login and receive JWT | +| GET | `/scores` | Get current Red/Blue scores | --- -## Tech Stack +## Project Setup -Mention all technologies used: +### Prerequisites +- Python 3.12+ +- Node.js 18+ and npm +- Azure OpenAI API key (GPT-4o deployment) -- Frontend: -- Backend: -- Database: -- APIs / Services: -- Tools / Libraries: +### 1. Clone and configure ---- +```bash +git clone +cd hacktofuture4-CO3 -## Project Setup Instructions +# Copy environment template and fill in your Azure key +cp .env.example .env +``` -Provide clear steps to run your project: +Edit `.env`: +```env +AZURE_OPENAI_API_KEY=your_key_here +AZURE_OPENAI_ENDPOINT=https://your-resource.cognitiveservices.azure.com/ +AZURE_OPENAI_API_VERSION=2024-12-01-preview +AZURE_OPENAI_MODEL=gpt-4o +``` + +### 2. Install Python dependencies ```bash -# Clone the repository -git clone +pip install -r requirements.txt +``` -# Install dependencies -... +### 3. Run everything (recommended) -# Run the project -... +```bash +./run.sh ``` + +This starts all services and opens the Arena Dashboard automatically: + +| Service | URL | +|---------|-----| +| Arena Dashboard | http://localhost:5173 | +| Red Agent API | http://localhost:8001 | +| Blue Agent API | http://localhost:8002 | +| Auth Service | http://localhost:8003 | + +### 4. Run specific modes + +```bash +./run.sh arena # Auth + backends + Arena dashboard only +./run.sh backends # All 3 backends only (no frontend) +./run.sh red # Red agent only +./run.sh blue # Blue agent only +``` + +--- + +## How to Use the Arena + +1. Open http://localhost:5173 and register an account +2. Set up MFA with your authenticator app +3. Log in — you land on the Arena battle screen + +**Red side (left panel):** +- Type a target URL or IP in the terminal input (e.g. 
`http://172.25.8.172:5000`) +- The Red crew launches automatically — watch tool cards appear in real time + +**Blue side (right panel):** +- Click **⟳ SEND REPORT** to receive the Red team's findings +- The Blue agent runs IDS detection, SIEM correlation, and queues fix cards +- Click **✓ APPLY** on individual fixes or **✓ APPROVE ALL** to apply everything +- Watch the Blue Log panel for IDS alerts and SIEM correlation reports + +**Scoreboard:** +- Click **🏆 SCOREBOARD** tab to see live Red vs Blue scores +- Click **⬇ REPORT** to download the full battle report as a `.txt` file + +--- + +## Project Structure + +``` +hacktofuture4-CO3/ +├── run.sh # Unified launcher for all services +├── requirements.txt +├── core/ +│ └── event_bus.py # Async pub/sub event bus (Red ↔ Blue) +├── auth_service/ # JWT + MFA auth service (:8003) +├── red_agent/ +│ ├── agents/ +│ │ ├── crew.py # CrewAI 3-agent crew definition +│ │ └── tools.py # 9 assessment tools with dashboard streaming +│ ├── backend/ +│ │ ├── main.py # FastAPI app (:8001) +│ │ ├── routers/chat_routes.py +│ │ └── services/ +│ │ ├── orchestrator.py # Mission lifecycle management +│ │ ├── llm_client.py # Azure OpenAI client +│ │ └── red_service.py +│ └── report_ingester.py # Parses Red reports into findings +├── blue_agent/ +│ ├── remediation/ +│ │ ├── remediation_engine.py # Approval-based fix pipeline +│ │ └── flask_fixer.py # Fix implementations +│ ├── ids/ +│ │ └── ids_engine.py # IDS: 15 signatures, real-time alerts +│ ├── siem/ +│ │ └── siem_engine.py # SIEM: event correlation, risk scoring +│ ├── scanner/ +│ │ └── ssh_scanner.py # SSH-based service + CVE scanner +│ └── backend/ +│ ├── main.py # FastAPI app (:8002) +│ ├── routers/ # defend, patch, strategy, scan, remediation, ids, siem +│ └── services/blue_service.py +└── dashboard/ + └── src/ + ├── pages/ArenaDashboard.tsx # Main battle UI + ├── api/ + │ ├── blueApi.ts + │ └── redApi.ts + └── hooks/ + ├── useRedWs.ts # Red WebSocket hook + └── useBlueWs.ts # Blue 
WebSocket hook +``` + +--- + +## Rules + +- Work must be done ONLY in the forked repository +- Only Four Contributors are allowed +- After 36 hours, make a PR to the Main Repository +- Do not copy code from other teams +- All commits must be from individual GitHub accounts +- Final submission must be pushed before the deadline diff --git a/auth_service/__init__.py b/auth_service/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/auth_service/auth_db.py b/auth_service/auth_db.py new file mode 100644 index 000000000..b563db5d1 --- /dev/null +++ b/auth_service/auth_db.py @@ -0,0 +1,74 @@ +"""In-memory user store with bcrypt password hashing.""" + +from __future__ import annotations + +import datetime +from typing import Dict, Optional + +try: + import bcrypt + + def _hash_password(password: str) -> str: + return bcrypt.hashpw(password.encode(), bcrypt.gensalt()).decode() + + def _check_password(password: str, password_hash: str) -> bool: + return bcrypt.checkpw(password.encode(), password_hash.encode()) + +except ImportError: + import hashlib + + def _hash_password(password: str) -> str: + return hashlib.sha256(password.encode()).hexdigest() + + def _check_password(password: str, password_hash: str) -> bool: + return hashlib.sha256(password.encode()).hexdigest() == password_hash + + +# --------------------------------------------------------------------------- +# In-memory store: keyed by username +# --------------------------------------------------------------------------- +users: Dict[str, dict] = {} + + +def create_user( + username: str, + email: str, + password: str, + totp_secret: str, + role: str = "player", +) -> dict: + """Create a new user and return the stored dict (without the hash).""" + password_hash = _hash_password(password) + user = { + "username": username, + "email": email, + "password_hash": password_hash, + "totp_secret": totp_secret, + "mfa_verified": False, + "created_at": datetime.datetime.utcnow().isoformat(), + "role": 
role, + } + users[username] = user + return user + + +def get_user(username: str) -> Optional[dict]: + """Return user dict or None.""" + return users.get(username) + + +def verify_password(username: str, password: str) -> bool: + """Check a plain-text password against the stored hash.""" + user = get_user(username) + if user is None: + return False + return _check_password(password, user["password_hash"]) + + +def list_users() -> list: + """Return a list of all users (without password hashes).""" + safe: list = [] + for u in users.values(): + entry = {k: v for k, v in u.items() if k != "password_hash"} + safe.append(entry) + return safe diff --git a/auth_service/jwt_utils.py b/auth_service/jwt_utils.py new file mode 100644 index 000000000..0ebb0dbc7 --- /dev/null +++ b/auth_service/jwt_utils.py @@ -0,0 +1,40 @@ +"""JWT creation and verification utilities.""" + +from __future__ import annotations + +import datetime +import os +from typing import Dict + +import jwt # PyJWT + +SECRET_KEY: str = os.getenv("JWT_SECRET_KEY", "htf-arena-secret-key-change-me") +ALGORITHM: str = "HS256" + + +def create_access_token(username: str, role: str) -> str: + """Return a signed JWT access token (expires in 60 minutes).""" + payload: Dict[str, object] = { + "sub": username, + "role": role, + "type": "access", + "exp": datetime.datetime.utcnow() + datetime.timedelta(minutes=60), + "iat": datetime.datetime.utcnow(), + } + return jwt.encode(payload, SECRET_KEY, algorithm=ALGORITHM) + + +def create_refresh_token(username: str) -> str: + """Return a signed JWT refresh token (expires in 24 hours).""" + payload: Dict[str, object] = { + "sub": username, + "type": "refresh", + "exp": datetime.datetime.utcnow() + datetime.timedelta(hours=24), + "iat": datetime.datetime.utcnow(), + } + return jwt.encode(payload, SECRET_KEY, algorithm=ALGORITHM) + + +def decode_token(token: str) -> dict: + """Decode and validate a JWT. 
Raises jwt.ExpiredSignatureError or jwt.InvalidTokenError.""" + return jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) diff --git a/auth_service/main.py b/auth_service/main.py new file mode 100644 index 000000000..9d132e757 --- /dev/null +++ b/auth_service/main.py @@ -0,0 +1,39 @@ +"""Auth service – FastAPI app on port 8003. + +Provides authentication, MFA, and score tracking for HTF Arena. +""" + +from __future__ import annotations + +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware + +from auth_service.routes.auth_routes import router as auth_router +from auth_service.routes.score_routes import router as score_router + +AUTH_API_PORT = 8003 + +app = FastAPI(title="HTF Arena Auth Service", version="1.0.0") + +# CORS – allow the frontend dev servers +app.add_middleware( + CORSMiddleware, + allow_origins=[ + "http://localhost:5173", + "http://localhost:5174", + "http://localhost:3001", + "http://localhost:3002", + ], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +# Routers +app.include_router(auth_router, prefix="/auth", tags=["auth"]) +app.include_router(score_router, tags=["scores"]) + + +@app.get("/health") +async def health() -> dict: + return {"status": "ok", "service": "auth", "port": AUTH_API_PORT} diff --git a/auth_service/mfa.py b/auth_service/mfa.py new file mode 100644 index 000000000..2aafc5c88 --- /dev/null +++ b/auth_service/mfa.py @@ -0,0 +1,32 @@ +"""MFA utilities – TOTP generation and QR code rendering.""" + +from __future__ import annotations + +import base64 +import io + +import pyotp +import qrcode # type: ignore[import-untyped] + + +def generate_totp_secret() -> str: + """Return a fresh base32-encoded TOTP secret.""" + return pyotp.random_base32() + + +def generate_qr_code(username: str, secret: str) -> str: + """Return a base64-encoded PNG of the provisioning QR code.""" + totp = pyotp.TOTP(secret) + uri = totp.provisioning_uri(name=username, issuer_name="HTF Arena") + + img = 
qrcode.make(uri) + buf = io.BytesIO() + img.save(buf, format="PNG") + buf.seek(0) + return base64.b64encode(buf.read()).decode() + + +def verify_totp(secret: str, code: str) -> bool: + """Verify a 6-digit TOTP code (allows +/-1 window for clock drift).""" + totp = pyotp.TOTP(secret) + return totp.verify(code, valid_window=1) diff --git a/auth_service/routes/__init__.py b/auth_service/routes/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/auth_service/routes/auth_routes.py b/auth_service/routes/auth_routes.py new file mode 100644 index 000000000..84c79464b --- /dev/null +++ b/auth_service/routes/auth_routes.py @@ -0,0 +1,155 @@ +"""Authentication routes – register, MFA setup, login, refresh, me.""" + +from __future__ import annotations + +from typing import Optional + +from fastapi import APIRouter, Header, HTTPException +from pydantic import BaseModel + +from auth_service.auth_db import create_user, get_user, verify_password +from auth_service.jwt_utils import create_access_token, create_refresh_token, decode_token +from auth_service.mfa import generate_qr_code, generate_totp_secret, verify_totp + +router = APIRouter() + + +# --------------------------------------------------------------------------- +# Request / response models +# --------------------------------------------------------------------------- + +class RegisterRequest(BaseModel): + username: str + email: str + password: str + + +class VerifyMFARequest(BaseModel): + username: str + totp_code: str + + +class LoginRequest(BaseModel): + username: str + password: str + totp_code: str + + +class RefreshRequest(BaseModel): + refresh_token: str + + +# --------------------------------------------------------------------------- +# Endpoints +# --------------------------------------------------------------------------- + +@router.post("/register") +async def register(body: RegisterRequest) -> dict: + # Validation + if len(body.username) < 3: + raise HTTPException(status_code=400, 
detail="Username must be at least 3 characters") + if "@" not in body.email: + raise HTTPException(status_code=400, detail="Invalid email address") + if len(body.password) < 6: + raise HTTPException(status_code=400, detail="Password must be at least 6 characters") + if get_user(body.username) is not None: + raise HTTPException(status_code=409, detail="Username already exists") + + totp_secret = generate_totp_secret() + user = create_user( + username=body.username, + email=body.email, + password=body.password, + totp_secret=totp_secret, + ) + qr_code = generate_qr_code(body.username, totp_secret) + + return { + "user_id": user["username"], + "qr_code": qr_code, + "totp_secret": totp_secret, + "message": "Scan QR code with your authenticator app, then verify with /auth/verify-mfa-setup", + } + + +@router.post("/verify-mfa-setup") +async def verify_mfa_setup(body: VerifyMFARequest) -> dict: + user = get_user(body.username) + if user is None: + raise HTTPException(status_code=404, detail="User not found") + + if not verify_totp(user["totp_secret"], body.totp_code): + raise HTTPException(status_code=400, detail="Invalid TOTP code") + + user["mfa_verified"] = True + return {"verified": True} + + +@router.post("/login") +async def login(body: LoginRequest) -> dict: + user = get_user(body.username) + if user is None: + raise HTTPException(status_code=401, detail="Invalid credentials") + + if not verify_password(body.username, body.password): + raise HTTPException(status_code=401, detail="Invalid credentials") + + if not user.get("mfa_verified"): + raise HTTPException(status_code=403, detail="MFA not set up. 
Complete /auth/verify-mfa-setup first.") + + if not verify_totp(user["totp_secret"], body.totp_code): + raise HTTPException(status_code=401, detail="Invalid TOTP code") + + access_token = create_access_token(body.username, user["role"]) + refresh_token = create_refresh_token(body.username) + + return { + "access_token": access_token, + "refresh_token": refresh_token, + "username": user["username"], + "role": user["role"], + } + + +@router.post("/refresh") +async def refresh(body: RefreshRequest) -> dict: + try: + payload = decode_token(body.refresh_token) + except Exception: + raise HTTPException(status_code=401, detail="Invalid or expired refresh token") + + if payload.get("type") != "refresh": + raise HTTPException(status_code=401, detail="Token is not a refresh token") + + username: str = payload["sub"] + user = get_user(username) + if user is None: + raise HTTPException(status_code=401, detail="User no longer exists") + + access_token = create_access_token(username, user["role"]) + return {"access_token": access_token} + + +@router.get("/me") +async def me(authorization: Optional[str] = Header(default=None)) -> dict: + if not authorization or not authorization.startswith("Bearer "): + raise HTTPException(status_code=401, detail="Missing or invalid Authorization header") + + token = authorization.split(" ", 1)[1] + try: + payload = decode_token(token) + except Exception: + raise HTTPException(status_code=401, detail="Invalid or expired token") + + username: str = payload["sub"] + user = get_user(username) + if user is None: + raise HTTPException(status_code=404, detail="User not found") + + return { + "username": user["username"], + "email": user["email"], + "role": user["role"], + "mfa_verified": user["mfa_verified"], + "created_at": user["created_at"], + } diff --git a/auth_service/routes/score_routes.py b/auth_service/routes/score_routes.py new file mode 100644 index 000000000..3aa20f704 --- /dev/null +++ b/auth_service/routes/score_routes.py @@ -0,0 +1,85 @@ 
+"""Score tracking routes – award points and view leaderboard.""" + +from __future__ import annotations + +import datetime +from collections import deque +from typing import Deque, List + +from fastapi import APIRouter, HTTPException +from pydantic import BaseModel + +router = APIRouter() + +# --------------------------------------------------------------------------- +# In-memory score store +# --------------------------------------------------------------------------- +red_score: int = 0 +blue_score: int = 0 +history: Deque[dict] = deque(maxlen=100) + + +# --------------------------------------------------------------------------- +# Request models +# --------------------------------------------------------------------------- + +class AwardRequest(BaseModel): + team: str # "red" or "blue" + points: int + reason: str + + +# --------------------------------------------------------------------------- +# Endpoints +# --------------------------------------------------------------------------- + +@router.post("/scores/award") +async def award_points(body: AwardRequest) -> dict: + global red_score, blue_score + + team = body.team.lower() + if team not in ("red", "blue"): + raise HTTPException(status_code=400, detail="team must be 'red' or 'blue'") + if body.points <= 0: + raise HTTPException(status_code=400, detail="points must be positive") + + if team == "red": + red_score += body.points + else: + blue_score += body.points + + entry = { + "team": team, + "points": body.points, + "reason": body.reason, + "timestamp": datetime.datetime.now().isoformat(), + } + history.appendleft(entry) + + return { + "red_score": red_score, + "blue_score": blue_score, + "awarded": entry, + } + + +@router.get("/scores") +async def get_scores() -> dict: + return { + "red_score": red_score, + "blue_score": blue_score, + "history": list(history), + } + + +@router.get("/scores/leaderboard") +async def leaderboard() -> List[dict]: + teams = [ + {"team": "red", "score": red_score}, + {"team": 
"blue", "score": blue_score}, + ] + teams.sort(key=lambda t: t["score"], reverse=True) + result: List[dict] = [] + for rank, t in enumerate(teams, start=1): + result.append({"team": t["team"], "score": t["score"], "rank": rank}) + return result diff --git a/auth_service/score_engine.py b/auth_service/score_engine.py new file mode 100644 index 000000000..1739db7a6 --- /dev/null +++ b/auth_service/score_engine.py @@ -0,0 +1,43 @@ +"""Score calculation engine for red/blue team findings and fixes.""" + +from __future__ import annotations + +from typing import Dict, List + +# Points awarded per severity level +SEVERITY_POINTS: Dict[str, int] = { + "critical": 100, + "high": 50, + "medium": 25, + "low": 10, +} + +# Multipliers – defense is harder so blue gets a bonus +RED_MULTIPLIER: float = 1.0 +BLUE_MULTIPLIER: float = 1.2 + + +def calc_red_points(findings: List[dict]) -> int: + """Calculate total red-team points from a list of findings. + + Each finding dict should have a ``severity`` key (critical/high/medium/low). + """ + total: float = 0.0 + for finding in findings: + severity = finding.get("severity", "low").lower() + base = SEVERITY_POINTS.get(severity, SEVERITY_POINTS["low"]) + total += base * RED_MULTIPLIER + return int(total) + + +def calc_blue_points(fixes: List[dict]) -> int: + """Calculate total blue-team points from a list of fixes. + + Each fix dict should have a ``severity`` key (critical/high/medium/low). 
+ """ + total: float = 0.0 + for fix in fixes: + severity = fix.get("severity", "low").lower() + base = SEVERITY_POINTS.get(severity, SEVERITY_POINTS["low"]) + total += base * BLUE_MULTIPLIER + return int(total) diff --git a/blue_agent/Blue-README.md b/blue_agent/Blue-README.md new file mode 100644 index 000000000..e49d922d3 --- /dev/null +++ b/blue_agent/Blue-README.md @@ -0,0 +1,308 @@ +# Blue Agent — Implementation Reference + +HTF (Hack The Flag) · Red Team vs Blue Team AI Simulation +Target system: `192.168.1.100` + +--- + +## What Was Implemented + +This document describes every file that was **created or modified** to bring the Blue Agent from empty stubs to a fully autonomous, real-time defend-respond-patch system. + +--- + +## Files Changed + +### `core/event_bus.py` — Modified (full rewrite) + +**Role:** Central nervous system. Every Red action flows through here to trigger Blue's detection → response → patching chain. + +**Key changes:** +- Fully `async def` — all `emit()` and handler calls are coroutines +- `asyncio.Queue` internal buffer — events are never dropped under burst load +- Multiple subscribers per event type — registration order is preserved +- Single background worker task processes events in FIFO order, guaranteeing `detect → respond → patch` delivery sequence +- Graceful `stop()` drains the queue before cancelling the worker + +**Supported event types:** + +| Event | Emitted by | Handled by | +|---|---|---| +| `port_probed` | IntrusionDetector | ResponseEngine | +| `port_scanned` | IntrusionDetector, LogMonitor | ResponseEngine | +| `exploit_attempted` | LogMonitor | ResponseEngine, Isolator | +| `cve_detected` | LogMonitor | ResponseEngine | +| `anomaly_detected` | AnomalyDetector | ResponseEngine, Isolator | +| `misconfig_found` | (reserved) | — | +| `response_complete` | ResponseEngine | AutoPatcher | +| `isolation_complete` | Isolator | (terminal) | +| `patch_complete` | AutoPatcher | (terminal) | +| `blue_ready` | BlueController | 
(broadcast) | + +--- + +### `blue_agent/detector/intrusion_detector.py` — Implemented + +**Role:** Feature 1 — Real-Time Detection of port scans and active probes. + +**Behaviour:** +- Continuous `asyncio` polling loop, ticks every **1 second** +- Simulates Red agent probing with 70 % probability per tick +- Emits `port_probed` for every detected probe +- Emits `port_scanned` additionally for sensitive ports (21, 22, 23, 3306, 5432) +- Non-blocking — never pauses anomaly or log detection +- Tracks `detection_count` for live status reporting + +**Sample output:** +``` +19:28:19 < intrusion_detector: Port 23 probe detected +19:28:19 > event_bus.emit("port_probed", {"port": 23, "protocol": "tcp"}) +``` + +--- + +### `blue_agent/detector/anomaly_detector.py` — Implemented + +**Role:** Feature 1 — Real-Time Detection of unusual traffic patterns. + +**Detection rules (all emit `anomaly_detected`):** + +| Rule | Condition | Anomaly type | +|---|---|---| +| Scan rate | > 5 scans/second | `scan_rate` | +| Sensitive port | Access on port 21/22/23/3306 | `sensitive_port` | +| Traffic spike | > 8 hits/second on a single port | `traffic_spike` | + +**Behaviour:** +- Continuous `asyncio` loop, ticks every **1 second** +- Maintains rolling scan window and per-port hit counters +- Runs concurrently alongside intrusion_detector and log_monitor — zero blocking +- Resets per-port counter after emitting a spike alert to avoid spam + +**Sample output:** +``` +19:28:20 < anomaly_detector: Scan rate 7/s exceeds threshold (5/s) from 10.0.0.42 +19:28:20 > event_bus.emit("anomaly_detected", {"type": "scan_rate", "rate": 7, "source_ip": "10.0.0.42"}) +``` + +--- + +### `blue_agent/detector/log_monitor.py` — Implemented + +**Role:** Feature 1 — Real-Time Detection by tailing system logs for Red signatures. 
+ +**Behaviour:** +- Maintains an internal rotating log buffer (max 500 lines) simulating `/var/log/syslog` +- **Injection task** — adds 1–3 realistic Red-agent log entries every 1.5 seconds +- **Tail loop** — processes new buffer lines every 1 second, pattern-matches signatures + +**Signature → event mapping:** + +| Signature | Example log pattern | Event emitted | +|---|---|---| +| `nmap` | `nmap -sV -p 3306 192.168.1.100` | `port_scanned` | +| `cve_lookup` | `searchsploit CVE-2023-44487` | `cve_detected` | +| `exploit` | `python3 exploit_ftp.py --target ...` | `exploit_attempted` | + +**Sample output:** +``` +19:28:21 < log_monitor: CVE lookup pattern found in logs → emitting cve_detected +19:28:21 > event_bus.emit("cve_detected", {"cve_id": "CVE-2023-44487", "service": "mysql", ...}) +``` + +--- + +### `blue_agent/responder/response_engine.py` — Implemented + +**Role:** Feature 2 — Real-Time Response. Reacts to every detection event immediately. + +**Event → action mapping:** + +| Event | Action | Status logged | +|---|---|---| +| `port_probed` / `port_scanned` | `close_port(port)` → iptables DROP (simulated) | `BLOCKED` | +| `exploit_attempted` | `isolate_service(service)` | `ISOLATED` | +| `cve_detected` | `harden_service(service, cve_id)` | `HARDENED` | +| `anomaly_detected` | `block_ip(source_ip)` | `BLOCKED` | + +**Behaviour:** +- All actions are idempotent — same port/IP/service is only acted on once +- After each action, `verify_fix()` confirms the block is in the simulated state +- Emits `response_complete` after every verified response (triggers AutoPatcher) + +**Sample output:** +``` +19:28:37 > close_port({"port": 3306, "protocol": "tcp"}) +19:28:38 < close_port: Port 3306/tcp blocked via iptables DROP rule +19:28:38 > verify_fix({"target": "192.168.1.100", "port": 3306}) +19:28:38 < verify_fix: Port 3306 is BLOCKED ✓ +``` + +--- + +### `blue_agent/responder/isolator.py` — Implemented + +**Role:** Feature 2 — Real-Time Response. 
Isolates services or source IPs under active attack. + +**Subscriptions:** +- `exploit_attempted` → `drop_inbound(port)` — drops all inbound traffic to the service port +- `anomaly_detected` → `drop_ip(source_ip)` — drops all traffic from the offending IP + +**Behaviour:** +- Completes isolation in **< 1 second** (30 ms simulated latency) +- Idempotent — isolating the same port/IP twice is a no-op +- Emits `isolation_complete` after each successful action + +**Sample output:** +``` +19:28:39 > isolator.drop_inbound({"port": 21, "protocol": "tcp"}) +19:28:39 < isolator: Port 21/tcp — all inbound traffic DROPPED +19:28:39 < isolator: Service 'ftp' on port 21 ISOLATED ✓ +``` + +--- + +### `blue_agent/patcher/auto_patcher.py` — Implemented + +**Role:** Feature 3 — Real-Time Patching. Fixes the root cause after every response. + +**Patch catalogue:** + +| Service | Ports | Action | What it does | +|---|---|---|---| +| `apache httpd` | 80, 443, 8080, 8443 | `patch` | Disable DIR-LISTING, apply security headers, harden server config | +| `mysql` | 3306 | `bind_local` | Enforce `bind-address=127.0.0.1`, block external access | +| `ftp` | 21 | `disable_anon` | Disable anonymous login, enforce auth, enable TLS | +| `telnet` | 23 | `remove_service` | Stop daemon, disable on boot, remove package | +| `ssh` | 22 | `harden` | Disable root login, enforce key-based auth, set MaxAuthTries 3 | +| `postgresql` | 5432 | `harden` | Restrict pg_hba.conf to local connections | + +**Behaviour:** +- Subscribes to `response_complete` +- Resolves service by name → port → partial match +- **Idempotent** — `service:port` patch key tracked; same patch is never applied twice +- Emits `patch_complete` after each patch + +**Sample output:** +``` +19:28:39 > harden_service({"service_name": "apache httpd", "port": 80, "action": "patch"}) +19:28:39 < harden_service: DIR-LISTING disabled, security headers applied ✓ +``` + +--- + +### `blue_agent/blue_controller.py` — Modified (full rewrite) + 
+**Role:** Main orchestrator — starts and connects all three features. + +**Startup sequence:** +1. Start EventBus worker +2. Register all subscriptions (response_engine, isolator, auto_patcher) +3. Emit `blue_ready` event +4. Launch all three detector loops **concurrently** via `asyncio.gather()` + +**`get_status()` returns:** +```json +{ + "detection_count": 42, + "response_count": 18, + "patch_count": 11, + "isolation_count": 7, + "running": true +} +``` + +**Concurrency guarantee:** `asyncio.gather()` with `return_exceptions=True` — a single detector crash cannot bring down the other two loops. + +--- + +## System Architecture + +``` +Red Agent Action + │ + ▼ + EventBus (asyncio.Queue — FIFO, never drops) + │ + ├──► IntrusionDetector ─┐ + │ [1s loop] │ port_probed + │ │ port_scanned + ├──► AnomalyDetector ──┤ anomaly_detected + │ [1s loop] │ + │ │ + └──► LogMonitor ──┘ port_scanned + [1s loop] cve_detected + exploit_attempted + │ + ┌────────────┴───────────┐ + ▼ ▼ + ResponseEngine Isolator + (close_port / (drop_inbound / + isolate / drop_ip) + harden / │ + block_ip) isolation_complete + │ + response_complete + │ + ▼ + AutoPatcher + (service-specific, + idempotent patches) + │ + patch_complete +``` + +## Concurrency Model + +All three detector loops run in parallel inside a single `asyncio.gather()` call. +No loop ever waits for another — detection continues even while patching is in progress. 
+ +``` +Time → 1s 2s 3s + ┌──────┐ ┌──────┐ ┌──────┐ +ID │detect│ │detect│ │detect│ IntrusionDetector (1s tick) + └──────┘ └──────┘ └──────┘ + ┌──────┐ ┌──────┐ ┌──────┐ +AD │detect│ │detect│ │detect│ AnomalyDetector (1s tick) + └──────┘ └──────┘ └──────┘ + ┌──────┐ ┌──────┐ ┌──────┐ +LM │detect│ │detect│ │detect│ LogMonitor (1s tick) + └──────┘ └──────┘ └──────┘ + ┌─┐ ┌─┐ +RE │R│ │R│ ResponseEngine (event-driven) + └─┘ └─┘ + ┌──┐ ┌──┐ +AP │P │ │P │ AutoPatcher (event-driven) + └──┘ └──┘ +``` + +## Log Format + +Every action across all files follows this exact format: + +``` +{HH:MM:SS} < {component}: {result_message} +{HH:MM:SS} > {tool_name}({json_params}) +``` + +Example end-to-end chain: +``` +19:28:19 < intrusion_detector: Port 23 probe detected +19:28:19 > event_bus.emit("port_probed", {"port": 23, "protocol": "tcp"}) +19:28:19 > close_port({"port": 23, "protocol": "tcp"}) +19:28:19 < close_port: Port 23/tcp blocked via iptables DROP rule +19:28:19 > verify_fix({"target": "192.168.1.100", "port": 23}) +19:28:19 < verify_fix: Port 23 is BLOCKED ✓ +19:28:19 > harden_service({"service_name": "telnet", "port": 23, "action": "remove_service"}) +19:28:19 < harden_service: Telnet service removed entirely ✓ +``` + +## Simulation Notes + +All defence actions are **fully simulated in-memory**: +- No real `iptables` rules are created +- No real services are restarted or removed +- No real filesystem changes are made +- Safe to run on any OS (Windows, Linux, macOS) without root + +Blocked ports, isolated services, and applied patches are tracked in module-level Python sets. The simulation is deterministic enough to demonstrate the full detect → respond → patch chain in a live demo. 
diff --git a/blue_agent/__init__.py b/blue_agent/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/blue_agent/backend/Dockerfile b/blue_agent/backend/Dockerfile new file mode 100644 index 000000000..2f763d5bc --- /dev/null +++ b/blue_agent/backend/Dockerfile @@ -0,0 +1,36 @@ +# syntax=docker/dockerfile:1.6 +# Blue Agent Backend — FastAPI on port 8002 +# +# Build context MUST be the project root so the image can include +# the shared Python packages (core/, shared/, config/) alongside blue_agent/. +# +# docker build -f blue_agent/backend/Dockerfile -t htf-blue-backend . + +FROM python:3.11-slim + +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 \ + PIP_NO_CACHE_DIR=1 \ + PIP_DISABLE_PIP_VERSION_CHECK=1 + +WORKDIR /app + +RUN apt-get update \ + && apt-get install -y --no-install-recommends curl \ + && rm -rf /var/lib/apt/lists/* + +COPY requirements.txt ./ +RUN pip install -r requirements.txt + +# Copy only the Python packages the Blue backend needs. +COPY core ./core +COPY config ./config +COPY shared ./shared +COPY blue_agent ./blue_agent + +EXPOSE 8002 + +HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \ + CMD curl -fsS http://localhost:8002/health || exit 1 + +CMD ["uvicorn", "blue_agent.backend.main:app", "--host", "0.0.0.0", "--port", "8002"] diff --git a/blue_agent/backend/__init__.py b/blue_agent/backend/__init__.py new file mode 100644 index 000000000..d8f60b5f5 --- /dev/null +++ b/blue_agent/backend/__init__.py @@ -0,0 +1 @@ +"""FastAPI backend package for the Blue (defender) agent.""" diff --git a/blue_agent/backend/main.py b/blue_agent/backend/main.py new file mode 100644 index 000000000..73e50a6b5 --- /dev/null +++ b/blue_agent/backend/main.py @@ -0,0 +1,88 @@ +"""FastAPI entry point for the Blue Agent backend. + +Runs on port 8002. Exposes REST routes for defense / patch / strategy / +scan / environment operations plus a WebSocket channel that streams live +tool-call logs to the Blue Team dashboard. 
+""" + +from contextlib import asynccontextmanager +from pathlib import Path + +from dotenv import load_dotenv + +# Load .env from project root so CVE_API_KEY etc. are available +_env_path = Path(__file__).resolve().parents[2] / ".env" +load_dotenv(_env_path) + +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware + +from blue_agent.backend.routers import ( + defense_routes, + environment_routes, + ids_routes, + patch_routes, + remediation_routes, + scan_routes, + siem_routes, + strategy_routes, +) +from blue_agent.backend.websocket import blue_ws + +BLUE_API_PORT = 8002 + + +@asynccontextmanager +async def lifespan(app: FastAPI): + from core.event_bus import event_bus + from blue_agent.backend.services.blue_service import set_broadcast_callback + from blue_agent.backend.websocket.blue_ws import manager + + async def _broadcast(payload: dict) -> None: + await manager.broadcast(payload) + + set_broadcast_callback(_broadcast) + await event_bus.start() + yield + + +app = FastAPI( + title="HTF Blue Agent API", + description="Backend for the Blue (defender) AI agent in the HTF simulation.", + version="0.2.0", + lifespan=lifespan, +) + +app.add_middleware( + CORSMiddleware, + allow_origins=["http://localhost:3002", "http://localhost:5173", "http://localhost:5174"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +app.include_router(defense_routes.router, prefix="/defend", tags=["defend"]) +app.include_router(patch_routes.router, prefix="/patch", tags=["patch"]) +app.include_router(strategy_routes.router, prefix="/strategy", tags=["strategy"]) +app.include_router(scan_routes.router, prefix="/scan", tags=["scan"]) +app.include_router(environment_routes.router, prefix="/environment", tags=["environment"]) +app.include_router(remediation_routes.router, prefix="/remediate", tags=["remediation"]) +app.include_router(ids_routes.router, prefix="/ids", tags=["ids"]) +app.include_router(siem_routes.router, prefix="/siem", 
tags=["siem"]) +app.include_router(blue_ws.router, tags=["websocket"]) + + +@app.get("/health", tags=["meta"]) +async def health() -> dict[str, str]: + return {"status": "ok", "agent": "blue"} + + +if __name__ == "__main__": + import uvicorn + + uvicorn.run( + "blue_agent.backend.main:app", + host="0.0.0.0", + port=BLUE_API_PORT, + reload=True, + ) diff --git a/blue_agent/backend/routers/__init__.py b/blue_agent/backend/routers/__init__.py new file mode 100644 index 000000000..22d7d954a --- /dev/null +++ b/blue_agent/backend/routers/__init__.py @@ -0,0 +1 @@ +"""HTTP routers for the Blue Agent backend.""" diff --git a/blue_agent/backend/routers/defense_routes.py b/blue_agent/backend/routers/defense_routes.py new file mode 100644 index 000000000..78c3405f5 --- /dev/null +++ b/blue_agent/backend/routers/defense_routes.py @@ -0,0 +1,36 @@ +"""Defense endpoints for the Blue Agent.""" + +from typing import List + +from fastapi import APIRouter + +from blue_agent.backend.schemas.blue_schemas import ( + ClosePortRequest, + DefenseResult, + HardenServiceRequest, + IsolateHostRequest, + ToolCall, +) +from blue_agent.backend.services import blue_service + +router = APIRouter() + + +@router.post("/close_port", response_model=DefenseResult) +async def close_port(request: ClosePortRequest) -> DefenseResult: + return await blue_service.close_port(request) + + +@router.post("/harden_service", response_model=DefenseResult) +async def harden_service(request: HardenServiceRequest) -> DefenseResult: + return await blue_service.harden_service(request) + + +@router.post("/isolate_host", response_model=DefenseResult) +async def isolate_host(request: IsolateHostRequest) -> DefenseResult: + return await blue_service.isolate_host(request) + + +@router.get("/recent", response_model=List[ToolCall]) +async def recent_actions(limit: int = 20) -> List[ToolCall]: + return await blue_service.recent_tool_calls(category="defend", limit=limit) diff --git 
a/blue_agent/backend/routers/environment_routes.py b/blue_agent/backend/routers/environment_routes.py new file mode 100644 index 000000000..62c95ff04 --- /dev/null +++ b/blue_agent/backend/routers/environment_routes.py @@ -0,0 +1,25 @@ +"""Environment monitoring endpoints for the Blue Agent.""" + +from typing import List, Optional + +from fastapi import APIRouter + +from blue_agent.backend.schemas.blue_schemas import ( + EnvironmentAlertInfo, + EnvironmentStats, +) +from blue_agent.backend.services import blue_service + +router = APIRouter() + + +@router.get("/alerts", response_model=List[EnvironmentAlertInfo]) +async def get_alerts(environment: Optional[str] = None) -> List[EnvironmentAlertInfo]: + """Return environment security alerts, optionally filtered.""" + return await blue_service.get_environment_alerts(environment=environment) + + +@router.get("/stats", response_model=EnvironmentStats) +async def get_env_stats() -> EnvironmentStats: + """Return environment monitoring statistics.""" + return await blue_service.get_environment_stats() diff --git a/blue_agent/backend/routers/ids_routes.py b/blue_agent/backend/routers/ids_routes.py new file mode 100644 index 000000000..e60b88b13 --- /dev/null +++ b/blue_agent/backend/routers/ids_routes.py @@ -0,0 +1,24 @@ +"""IDS endpoints — intrusion alert status and alert history.""" + +from __future__ import annotations + +from typing import Any, Dict, List + +from fastapi import APIRouter + +from blue_agent.backend.services import blue_service + +router = APIRouter() + + +@router.get("/status") +async def ids_status() -> Dict[str, Any]: + """Current IDS engine status and alert summary.""" + return blue_service.get_ids_status() + + +@router.get("/alerts") +async def ids_alerts(limit: int = 50) -> List[Dict[str, Any]]: + """Recent IDS alerts (most recent first).""" + alerts = blue_service.get_ids_alerts(limit=limit) + return list(reversed(alerts)) diff --git a/blue_agent/backend/routers/patch_routes.py 
b/blue_agent/backend/routers/patch_routes.py new file mode 100644 index 000000000..874acc1da --- /dev/null +++ b/blue_agent/backend/routers/patch_routes.py @@ -0,0 +1,23 @@ +"""Patching endpoints for the Blue Agent.""" + +from fastapi import APIRouter + +from blue_agent.backend.schemas.blue_schemas import ( + PatchRequest, + PatchResult, + VerifyFixRequest, + VerifyFixResult, +) +from blue_agent.backend.services import blue_service + +router = APIRouter() + + +@router.post("/apply", response_model=PatchResult) +async def apply_patch(request: PatchRequest) -> PatchResult: + return await blue_service.apply_patch(request) + + +@router.post("/verify_fix", response_model=VerifyFixResult) +async def verify_fix(request: VerifyFixRequest) -> VerifyFixResult: + return await blue_service.verify_fix(request) diff --git a/blue_agent/backend/routers/remediation_routes.py b/blue_agent/backend/routers/remediation_routes.py new file mode 100644 index 000000000..3c461fb9a --- /dev/null +++ b/blue_agent/backend/routers/remediation_routes.py @@ -0,0 +1,83 @@ +"""Remediation endpoints — Red report ingestion, fix pipeline, and approval workflow.""" + +from __future__ import annotations + +from typing import List + +from fastapi import APIRouter, HTTPException + +from blue_agent.backend.schemas.blue_schemas import ( + ApprovalResult, + PendingFix, + RedReportRequest, + RemediationResult, + RemediationStatus, + ToolCall, +) +from blue_agent.backend.services import blue_service + +router = APIRouter() + + +@router.post("/ingest-report", response_model=RemediationResult) +async def ingest_red_report(report: RedReportRequest) -> RemediationResult: + """Receive a Red team pen-test report and trigger simultaneous remediation. + + The Blue Agent will parse each finding and apply fixes in real-time + while the report is being processed. 
+ """ + return await blue_service.ingest_red_report(report) + + +@router.post("/run-sample", response_model=RemediationResult) +async def run_sample_remediation() -> RemediationResult: + """Run the full remediation pipeline using the sample Red team report. + + Triggers the complete Red → Blue pipeline with the known findings + from the 172.25.8.172:5000 pen-test. + """ + return await blue_service.run_sample_remediation() + + +@router.get("/status", response_model=RemediationStatus) +async def remediation_status() -> RemediationStatus: + """Get current remediation engine status.""" + return await blue_service.get_remediation_status() + + +@router.get("/recent", response_model=List[ToolCall]) +async def recent_remediation_actions(limit: int = 20) -> List[ToolCall]: + return await blue_service.recent_tool_calls(category="remediation", limit=limit) + + +# ── Approval workflow endpoints ───────────────────────────────────── + + +@router.get("/pending", response_model=List[PendingFix]) +async def pending_fixes() -> List[PendingFix]: + """Return all fixes currently awaiting user approval.""" + return await blue_service.get_pending_fixes() + + +@router.post("/approve/{fix_id}", response_model=ApprovalResult) +async def approve_fix(fix_id: str) -> ApprovalResult: + """Approve and apply a single pending fix.""" + try: + return await blue_service.approve_fix(fix_id) + except ValueError as exc: + raise HTTPException(status_code=404, detail=str(exc)) + + +@router.post("/approve-all", response_model=List[ApprovalResult]) +async def approve_all_fixes() -> List[ApprovalResult]: + """Approve and apply all pending fixes at once.""" + return await blue_service.approve_all_fixes() + + +@router.post("/reject/{fix_id}", response_model=ApprovalResult) +async def reject_fix(fix_id: str) -> ApprovalResult: + """Reject a pending fix, removing it from the queue.""" + try: + return await blue_service.reject_fix(fix_id) + except ValueError as exc: + raise HTTPException(status_code=404, 
detail=str(exc))


# ── blue_agent/backend/routers/scan_routes.py ──────────────────────

"""Asset scanning and CVE lookup endpoints for the Blue Agent."""

from typing import List, Optional

from fastapi import APIRouter

# NOTE(review): ScanRequest and ScanResult are imported but not used by any
# route below — confirm no external re-export relies on them before removing.
from blue_agent.backend.schemas.blue_schemas import (
    AssetInfo,
    SSHCredentials,
    SSHScanResult,
    ScanRequest,
    ScanResult,
    VulnerabilityInfo,
)
from blue_agent.backend.services import blue_service

router = APIRouter()


@router.get("/inventory", response_model=List[AssetInfo])
async def get_inventory(environment: Optional[str] = None) -> List[AssetInfo]:
    """Return the full asset inventory, optionally filtered by environment."""
    return await blue_service.get_asset_inventory(environment=environment)


@router.get("/vulnerable", response_model=List[AssetInfo])
async def get_vulnerable_assets() -> List[AssetInfo]:
    """Return only assets with known CVEs."""
    return await blue_service.get_vulnerable_assets()


@router.get("/stats")
async def get_scan_stats() -> dict:
    """Return scan statistics."""
    return await blue_service.get_scan_stats()


@router.get("/vulnerabilities", response_model=List[VulnerabilityInfo])
async def get_all_vulnerabilities() -> List[VulnerabilityInfo]:
    """Return all discovered vulnerabilities across all assets."""
    return await blue_service.get_all_vulnerabilities()


@router.post("/ssh", response_model=SSHScanResult)
async def ssh_scan(creds: SSHCredentials) -> SSHScanResult:
    """Connect to a server via SSH, discover all software, lookup CVEs."""
    # The service returns a plain dict; re-validate it through the schema so
    # clients always receive the documented shape.
    result = await blue_service.run_ssh_scan(
        host=creds.host,
        ssh_port=creds.ssh_port,
        username=creds.username,
        password=creds.password,
    )
    return SSHScanResult(**result)


@router.post("/ssh/apply-fixes")
async def apply_fixes() -> dict:
    """Apply the proposed fixes from the last scan."""
    return await blue_service.apply_ssh_fixes()


@router.get("/ssh/results")
async def ssh_scan_results() -> list:
    """Return results from the last SSH scan."""
    return blue_service.get_ssh_scan_results()


@router.get("/ssh/stats")
async def ssh_scan_stats() -> dict:
    """Return SSH scan statistics."""
    return blue_service.get_ssh_scan_stats()


# ── blue_agent/backend/routers/siem_routes.py ──────────────────────

"""SIEM endpoints — correlated security report and event status."""

from __future__ import annotations

from typing import Any, Dict

from fastapi import APIRouter

from blue_agent.backend.services import blue_service

router = APIRouter()


@router.get("/report")
async def siem_report() -> Dict[str, Any]:
    """Full correlated SIEM report with attack timeline and risk score."""
    return blue_service.get_siem_report()


@router.get("/status")
async def siem_status() -> Dict[str, Any]:
    """Current SIEM engine status."""
    return blue_service.get_siem_status()


# ── blue_agent/backend/routers/strategy_routes.py ──────────────────

"""Strategy and evolution endpoints for the Blue Agent."""

from fastapi import APIRouter

from blue_agent.backend.schemas.blue_schemas import (
    BlueAgentStatus,
    DefensePlan,
    EvolutionMetrics,
    StrategyRequest,
)
from blue_agent.backend.services import blue_service

router = APIRouter()


@router.post("/plan", response_model=DefensePlan)
async def plan_defense(request: StrategyRequest) -> DefensePlan:
    # Build a defense plan for the threat described in the request body.
    return await blue_service.plan_defense(request)


@router.post("/evolve", response_model=DefensePlan)
async def evolve_strategy(request: StrategyRequest) -> DefensePlan:
    # Trigger one evolution step of the defense strategy.
    return await blue_service.evolve_strategy(request)


@router.get("/current", response_model=DefensePlan)
async def current_strategy() -> DefensePlan:
    # Read-only view of the currently active defense plan.
    return await blue_service.current_strategy()


@router.get("/evolution", response_model=EvolutionMetrics)
async def evolution_metrics() -> EvolutionMetrics:
    # Aggregate metrics from the defense evolver.
    return await blue_service.get_evolution_metrics()


@router.get("/status", response_model=BlueAgentStatus)
async def agent_status() -> BlueAgentStatus:
    # Full live counter snapshot for the Blue Agent.
    return await blue_service.get_agent_status()


# ── blue_agent/backend/schemas/__init__.py ─────────────────────────

"""Pydantic schemas exposed by the Blue Agent backend."""


# ── blue_agent/backend/schemas/blue_schemas.py ─────────────────────

"""Request / response models for the Blue Agent backend."""

from __future__ import annotations

from datetime import datetime
from enum import Enum
from typing import Any, Dict, List, Optional

from pydantic import BaseModel, Field


class ToolStatus(str, Enum):
    """Lifecycle state of a ToolCall."""
    PENDING = "PENDING"
    RUNNING = "RUNNING"
    DONE = "DONE"
    FAILED = "FAILED"


class ToolCall(BaseModel):
    """One invocation of a Blue-side tool, streamed to the dashboard."""
    id: str
    name: str = Field(..., description="Tool name, e.g. close_port, verify_fix")
    category: str = Field(..., description="defend | patch | strategy | scan | environment | evolution")
    status: ToolStatus = ToolStatus.PENDING
    params: Dict[str, Any] = Field(default_factory=dict)
    result: Optional[Dict[str, Any]] = None
    # NOTE(review): datetime.now() is naive local time — confirm whether the
    # dashboard expects UTC before changing.
    started_at: datetime = Field(default_factory=datetime.now)
    finished_at: Optional[datetime] = None


class LogEntry(BaseModel):
    """One log line streamed to the dashboard, optionally tied to a tool call."""
    timestamp: datetime = Field(default_factory=datetime.now)
    level: str = "INFO"
    message: str
    tool_id: Optional[str] = None


# ── Defense ────────────────────────────────────────────────────────

class ClosePortRequest(BaseModel):
    """Request to close a port on a host."""
    host: str
    port: int
    protocol: str = "tcp"


class HardenServiceRequest(BaseModel):
    """Request to harden a named service on a host."""
    host: str
    service: str
    options: Dict[str, Any] = Field(default_factory=dict)


class IsolateHostRequest(BaseModel):
    """Request to isolate a host from the network."""
    host: str
    reason: Optional[str] = None


class DefenseResult(BaseModel):
    """Outcome of a defense action, wrapping the ToolCall that ran it."""
    tool_call: ToolCall
    success: bool = True
    detail: Optional[str] = None


# ── Patching ───────────────────────────────────────────────────────

class PatchRequest(BaseModel):
    """Request to patch a CVE and/or package on a host."""
    host: str
    cve_id: Optional[str] = None
    package: Optional[str] = None


class PatchResult(BaseModel):
    """Outcome of a patch attempt."""
    tool_call: ToolCall
    applied: bool = False
    notes: Optional[str] = None


class VerifyFixRequest(BaseModel):
    """Request to verify that a CVE fix took effect on a host."""
    host: str
    cve_id: str


class VerifyFixResult(BaseModel):
    """Outcome of a fix verification."""
    tool_call: ToolCall
    verified: bool = False
    evidence: Optional[str] = None


# ── Strategy ───────────────────────────────────────────────────────

class StrategyRequest(BaseModel):
    """Threat context for planning or evolving a defense strategy."""
    host: str
    threat: Dict[str, Any] = Field(default_factory=dict)


class DefensePlan(BaseModel):
    """Ordered defense steps plus the ToolCall that produced them."""
    tool_call: ToolCall
    steps: List[str] = Field(default_factory=list)
    rationale: Optional[str] = None


# ── Asset Scanning ─────────────────────────────────────────────────

class ScanRequest(BaseModel):
    """Optional environment filter for an asset scan."""
    environment: Optional[str] = None  # cloud, onprem, hybrid, or None for all


class AssetInfo(BaseModel):
    """One discovered asset (host:port service) with its CVE summary."""
    asset_id: str
    host: str
    port: int
    service: str
    environment: str
    layer: str
    version: Optional[str] = None
    banner: Optional[str] = None
    detection_method: Optional[str] = None
    cve_count: int = 0
    cves: List[Dict[str, Any]] = Field(default_factory=list)
    last_scanned: Optional[float] = None  # epoch seconds, presumably — confirm
    status: str = "discovered"


class ScanResult(BaseModel):
    """Assets plus aggregate stats from one scan run."""
    tool_call: ToolCall
    assets: List[AssetInfo] = Field(default_factory=list)
    stats: Dict[str, Any] = Field(default_factory=dict)


class VulnerabilityInfo(BaseModel):
    """One CVE finding tied to a piece of discovered software."""
    cve_id: str
    severity: str
    cvss_score: float
    description: str
    affected_software: str
    affected_version: str
    fix: str
    host: Optional[str] = None
    port: Optional[int] = None


# ── Environment Monitoring ─────────────────────────────────────────

class EnvironmentAlertInfo(BaseModel):
    """One alert raised by the environment manager."""
    alert_id: str
    environment: str
    category: str
    severity: str
    title: str
    description: str
    resource: str
    recommendation: str
    timestamp: float


class EnvironmentStats(BaseModel):
    """Alert counters grouped by environment, severity, and category."""
    total_alerts: int = 0
    by_environment: Dict[str, int] = Field(default_factory=dict)
    by_severity: Dict[str, int] = Field(default_factory=dict)
    by_category: Dict[str, int] = Field(default_factory=dict)
    monitoring_active: bool = False


# ── Evolution ──────────────────────────────────────────────────────

class EvolutionMetrics(BaseModel):
    """Learning-loop counters reported by the defense evolver."""
    evolution_count: int = 0
    round_count: int = 0
    avg_response_time_ms: float = 0.0
    response_accuracy_pct: float = 0.0
    improvement_pct: float = 0.0
    current_params: Dict[str, Any] = Field(default_factory=dict)
    top_attack_patterns: List[Dict[str, Any]] = Field(default_factory=list)
    total_patterns_tracked: int = 0


# ── SSH Scanning ───────────────────────────────────────────────────

class SSHCredentials(BaseModel):
    """Credentials for an SSH scan of a single host."""
    host: str
    ssh_port: int = 22
    username: str = "root"
    password: str


class SSHScanResult(BaseModel):
    """Summary of one SSH scan: discovered services, CVEs, applied fixes."""
    success: bool
    host: str
    error: Optional[str] = None
    os_info: Optional[str] = None
    listening_ports: List[Dict[str, Any]] = Field(default_factory=list)
    services: List[Dict[str, Any]] = Field(default_factory=list)
    total_services: int = 0
    total_cves: int = 0
    fixes_applied: int = 0
    elapsed_seconds: float = 0.0


# ── Full Status ────────────────────────────────────────────────────

class BlueAgentStatus(BaseModel):
    """Live counters for every Blue subsystem, pushed over WebSocket."""
    running: bool = False
    detection_count: int = 0
    response_count: int = 0
    patch_count: int = 0
    cve_fix_count: int = 0
    isolation_count: int = 0
    scan_cycles: int = 0
    assets_discovered: int = 0
    vulnerable_assets: int = 0
    total_vulnerabilities: int = 0
    environment_alerts: int = 0
    evolution_rounds: int = 0
    defense_plans: int = 0
    remediation_findings: int = 0
    remediation_fixes: int = 0


# ── Red Report Ingestion ───────────────────────────────────────────

class RedReportRequest(BaseModel):
    """Structured Red team penetration test report."""
    target: str = Field(..., description="Target URL, e.g. http://172.25.8.172:5000")
    risk_score: float = Field(0.0, description="Overall risk score 0-10")
    recon: Dict[str, Any] = Field(default_factory=dict, description="Phase 1: Recon findings")
    exploit: Dict[str, Any] = Field(default_factory=dict, description="Phase 2: Exploit findings")
    recommendations: List[Dict[str, Any]] = Field(default_factory=list, description="Remediation recommendations")


class RemediationResult(BaseModel):
    """Result of the Blue Agent remediation pipeline."""
    target: str
    risk_score: float
    total_findings: int = 0
    fixes_applied: int = 0
    total_steps: int = 0
    severity_counts: Dict[str, int] = Field(default_factory=dict)
    applied_fixes: List[Dict[str, Any]] = Field(default_factory=list)
    pending_fixes: List[Dict[str, Any]] = Field(default_factory=list)
    status: str = "complete"


class RemediationStatus(BaseModel):
    """Current status of the remediation engine."""
    findings_received: int = 0
    fixes_dispatched: int = 0
    total_steps: int = 0
    applied_fixes: List[Dict[str, Any]] = Field(default_factory=list)


# ── Approval Workflow ──────────────────────────────────────────────

class PendingFix(BaseModel):
    """A fix awaiting user approval before it is applied."""
    fix_id: str
    category: str
    severity: str
    description: str
    endpoint: Optional[str] = None
    status: str = "pending_approval"
    finding_details: Dict[str, Any] = Field(default_factory=dict)


class ApprovalResult(BaseModel):
    """Result of approving or rejecting a pending fix."""
    fix_id: str
    status: str  # "approved" or "rejected"
    fix_result: Optional[Dict[str, Any]] = None


# ── blue_agent/backend/services/__init__.py ────────────────────────

"""Service layer wiring HTTP routes to the Blue agent's domain modules."""
b/blue_agent/backend/services/blue_service.py new file mode 100644 index 000000000..243be715e --- /dev/null +++ b/blue_agent/backend/services/blue_service.py @@ -0,0 +1,431 @@ +"""Bridge between the HTTP/WS layer and the Blue agent's domain modules.""" + +from __future__ import annotations + +import asyncio +import uuid +from collections import deque +from datetime import datetime +from typing import Any, Callable, Deque, Dict, List, Optional + +from blue_agent.backend.schemas.blue_schemas import ( + ApprovalResult, + AssetInfo, + BlueAgentStatus, + ClosePortRequest, + DefensePlan, + DefenseResult, + EnvironmentAlertInfo, + EnvironmentStats, + EvolutionMetrics, + HardenServiceRequest, + IsolateHostRequest, + LogEntry, + PatchRequest, + PatchResult, + PendingFix, + RedReportRequest, + RemediationResult, + RemediationStatus, + StrategyRequest, + ToolCall, + ToolStatus, + VerifyFixRequest, + VerifyFixResult, + VulnerabilityInfo, +) +from blue_agent.scanner.asset_scanner import AssetScanner +from blue_agent.scanner.ssh_scanner import SSHScanner +from blue_agent.environment.environment_manager import EnvironmentManager +from blue_agent.strategy.defense_evolver import DefenseEvolver +from blue_agent.strategy.defense_planner import DefensePlanner +from blue_agent.remediation.remediation_engine import RemediationEngine +from blue_agent.ids.ids_engine import IDSEngine +from blue_agent.siem.siem_engine import SIEMEngine + +_TOOL_HISTORY: Deque[ToolCall] = deque(maxlen=200) +_LOG_HISTORY: Deque[LogEntry] = deque(maxlen=500) + +_asset_scanner = AssetScanner() +_ssh_scanner = SSHScanner() +_environment_manager = EnvironmentManager() +_defense_planner = DefensePlanner() +_defense_evolver = DefenseEvolver() +_remediation_engine = RemediationEngine() +_ids_engine = IDSEngine() +_siem_engine = SIEMEngine() + +# --------------------------------------------------------------------------- +# Real-time broadcast bridge — WebSocket registers its callback here +# 
--------------------------------------------------------------------------- + +_broadcast_cb: Optional[Callable] = None + + +def set_broadcast_callback(cb: Callable) -> None: + """Called by blue_ws.py to register the WebSocket broadcast function.""" + global _broadcast_cb + _broadcast_cb = cb + _ids_engine.set_broadcast(cb) + _siem_engine.set_broadcast(cb) + + +def clear_history() -> None: + """Wipe accumulated log and tool-call history on fresh client connection.""" + _LOG_HISTORY.clear() + _TOOL_HISTORY.clear() + + +def _broadcast(payload: dict) -> None: + """Push a payload to all connected WebSocket clients.""" + if _broadcast_cb: + try: + loop = asyncio.get_event_loop() + if loop.is_running(): + asyncio.ensure_future(_broadcast_cb(payload)) + except RuntimeError: + pass + + +def add_log(message: str, level: str = "INFO", tool_id: Optional[str] = None) -> LogEntry: + """Add a log entry and broadcast it to all dashboard clients.""" + entry = LogEntry(level=level, message=message, tool_id=tool_id) + _LOG_HISTORY.append(entry) + _broadcast({"type": "log", "payload": entry.model_dump(mode="json")}) + return entry + + +def _new_tool_call(name: str, category: str, params: Dict[str, Any]) -> ToolCall: + call = ToolCall( + id=str(uuid.uuid4()), + name=name, + category=category, + status=ToolStatus.RUNNING, + params=params, + ) + _TOOL_HISTORY.append(call) + _broadcast({"type": "tool_call", "payload": call.model_dump(mode="json")}) + return call + + +def _finish(call: ToolCall, result: Dict[str, Any], status: ToolStatus = ToolStatus.DONE) -> ToolCall: + call.status = status + call.result = result + call.finished_at = datetime.now() + add_log( + f"{call.name} -> {status.value}" + (f" | {result.get('detail', '')}" if result.get('detail') else ""), + level="INFO" if status is ToolStatus.DONE else "ERROR", + tool_id=call.id, + ) + _broadcast({"type": "tool_call", "payload": call.model_dump(mode="json")}) + return call + + +# ── Defense endpoints 
──────────────────────────────────────────────── + +async def close_port(request: ClosePortRequest) -> DefenseResult: + call = _new_tool_call("close_port", "defend", request.model_dump()) + return DefenseResult( + tool_call=_finish(call, {"closed": True, "detail": f"closed {request.protocol}/{request.port}"}), + detail=f"closed {request.protocol}/{request.port} on {request.host}", + ) + + +async def harden_service(request: HardenServiceRequest) -> DefenseResult: + call = _new_tool_call("harden_service", "defend", request.model_dump()) + return DefenseResult( + tool_call=_finish(call, {"hardened": request.service, "detail": f"hardened {request.service}"}), + detail=f"hardened {request.service} on {request.host}", + ) + + +async def isolate_host(request: IsolateHostRequest) -> DefenseResult: + call = _new_tool_call("isolate_host", "defend", request.model_dump()) + return DefenseResult( + tool_call=_finish(call, {"isolated": request.host}), + detail=request.reason or "isolated", + ) + + +# ── Patch endpoints ────────────────────────────────────────────────── + +async def apply_patch(request: PatchRequest) -> PatchResult: + call = _new_tool_call("apply_patch", "patch", request.model_dump()) + return PatchResult(tool_call=_finish(call, {"applied": True}), applied=True) + + +async def verify_fix(request: VerifyFixRequest) -> VerifyFixResult: + call = _new_tool_call("verify_fix", "patch", request.model_dump()) + return VerifyFixResult( + tool_call=_finish(call, {"verified": True}), + verified=True, + evidence="re-scan returned no matching CVE signature", + ) + + +# ── Strategy endpoints ─────────────────────────────────────────────── + +async def plan_defense(request: StrategyRequest) -> DefensePlan: + call = _new_tool_call("plan_defense", "strategy", request.model_dump()) + plan = _defense_planner.get_current_plan() + steps = [a.get("reason", a.get("action", "")) for a in plan[:10]] + return DefensePlan( + tool_call=_finish(call, {"steps": steps, "plan_count": 
len(plan)}), + steps=steps, + ) + + +async def evolve_strategy(request: StrategyRequest) -> DefensePlan: + call = _new_tool_call("evolve_strategy", "strategy", request.model_dump()) + metrics = _defense_evolver.get_metrics() + steps = [f"Evolution #{metrics['evolution_count']}", f"Accuracy: {metrics['response_accuracy_pct']:.1f}%"] + return DefensePlan(tool_call=_finish(call, metrics), steps=steps) + + +async def current_strategy() -> DefensePlan: + call = _new_tool_call("current_strategy", "strategy", {}) + plan = _defense_planner.get_current_plan() + threat = _defense_planner.get_threat_summary() + steps = [a.get("reason", "") for a in plan[:10]] + return DefensePlan( + tool_call=_finish(call, {**threat, "plan_actions": len(plan)}), + steps=steps, + ) + + +# ── SSH scan — the main pipeline ───────────────────────────────────── + +async def run_ssh_scan(host: str, ssh_port: int, username: str, password: str) -> dict: + """Full pipeline: SSH connect → discover → CVE lookup → fix → verify. + + Every step is logged as a ToolCall + LogEntry and broadcast to the + dashboard in real-time via WebSocket. 
+ """ + # Pass the logging callback to the scanner so it can stream progress + result = await _ssh_scanner.scan( + host, ssh_port, username, password, + log_cb=add_log, + tool_cb=_create_scan_tool, + ) + return result + + +def _create_scan_tool(name: str, params: dict, result: dict, status: str = "DONE") -> None: + """Helper: create a completed ToolCall for a scan step.""" + call = _new_tool_call(name, "scan", params) + _finish(call, result, ToolStatus.DONE if status == "DONE" else ToolStatus.FAILED) + + +# ── Asset scanning endpoints ───────────────────────────────────────── + +async def get_asset_inventory(environment: Optional[str] = None) -> list: + if environment: + by_env = _asset_scanner.get_inventory_by_environment() + items = by_env.get(environment, []) + else: + items = _asset_scanner.get_inventory() + return [AssetInfo(**item) for item in items] + + +async def get_vulnerable_assets() -> list: + items = _asset_scanner.get_vulnerable_assets() + return [AssetInfo(**item) for item in items] + + +async def get_scan_stats() -> dict: + return _ssh_scanner.get_stats() + + +async def get_all_vulnerabilities() -> list: + vulns = [] + for svc in _ssh_scanner.last_scan_results: + for cve in svc.cves: + vulns.append(VulnerabilityInfo( + cve_id=cve.cve_id, + severity=cve.severity, + cvss_score=cve.cvss_score, + description=cve.description, + affected_software=cve.affected_software, + affected_version=cve.affected_version, + fix=cve.fix, + )) + return vulns + + +# ── Environment monitoring endpoints ───────────────────────────────── + +async def get_environment_alerts(environment: Optional[str] = None) -> list: + items = _environment_manager.get_alerts(environment=environment) + return [EnvironmentAlertInfo(**item) for item in items] + + +async def get_environment_stats() -> EnvironmentStats: + stats = _environment_manager.get_stats() + return EnvironmentStats(**stats) + + +# ── Evolution endpoints ────────────────────────────────────────────── + +async def 
get_evolution_metrics() -> EvolutionMetrics: + metrics = _defense_evolver.get_metrics() + return EvolutionMetrics(**metrics) + + +# ── Full agent status ──────────────────────────────────────────────── + +async def get_agent_status() -> BlueAgentStatus: + ssh_stats = _ssh_scanner.get_stats() + return BlueAgentStatus( + running=True, + detection_count=ssh_stats.get("services_found", 0), + response_count=ssh_stats.get("total_cves", 0), + patch_count=ssh_stats.get("fixes_applied", 0), + cve_fix_count=ssh_stats.get("fixes_applied", 0), + isolation_count=0, + scan_cycles=ssh_stats.get("scan_count", 0), + assets_discovered=ssh_stats.get("services_found", 0), + vulnerable_assets=ssh_stats.get("vulnerable_services", 0), + total_vulnerabilities=ssh_stats.get("total_cves", 0), + environment_alerts=0, + evolution_rounds=0, + defense_plans=0, + ) + + +async def apply_ssh_fixes() -> dict: + """Step 2: apply approved fixes on the server.""" + result = await _ssh_scanner.apply_fixes( + log_cb=add_log, + tool_cb=_create_scan_tool, + ) + return result + + +def get_ssh_scan_results() -> list: + return _ssh_scanner.get_results() + + +def get_ssh_scan_stats() -> dict: + return _ssh_scanner.get_stats() + + +# ── Approval workflow endpoints ────────────────────────────────────── + +async def get_pending_fixes() -> List[PendingFix]: + """Return all fixes currently awaiting user approval.""" + items = _remediation_engine.get_pending_fixes() + return [PendingFix(**item) for item in items] + + +async def approve_fix(fix_id: str) -> ApprovalResult: + """Approve and apply a single pending fix.""" + result = await _remediation_engine.approve_fix(fix_id) + return ApprovalResult(**result) + + +async def approve_all_fixes() -> List[ApprovalResult]: + """Approve and apply every pending fix.""" + results = await _remediation_engine.approve_all() + return [ApprovalResult(**r) for r in results] + + +async def reject_fix(fix_id: str) -> ApprovalResult: + """Reject a pending fix, removing it from the 
queue.""" + result = _remediation_engine.reject_fix(fix_id) + return ApprovalResult(**result) + + +# ── History endpoints ──────────────────────────────────────────────── + +async def recent_tool_calls(category: Optional[str] = None, limit: int = 20) -> list: + items = list(_TOOL_HISTORY) + if category: + items = [c for c in items if c.category == category] + return items[-limit:] + + +async def recent_logs(limit: int = 100) -> list: + return list(_LOG_HISTORY)[-limit:] + + +# ── Remediation endpoints (Red report → Blue fix pipeline) ────────── + +async def ingest_red_report(report: RedReportRequest) -> RemediationResult: + """Process a Red team report and apply all fixes simultaneously.""" + from core.event_bus import event_bus + + # Ensure event bus is running for the remediation pipeline + await event_bus.start() + _remediation_engine.register() + _ids_engine.register() + _siem_engine.register() + + call = _new_tool_call("ingest_red_report", "remediation", { + "target": report.target, + "risk_score": report.risk_score, + }) + + add_log(f"Ingesting Red team report for {report.target} (risk: {report.risk_score}/10)", level="INFO") + + # Run the full pipeline: parse → publish findings → remediate simultaneously + result = await _remediation_engine.remediate_full_report(report.model_dump()) + + report_summary = result.get("report_summary", {}) + remediation = result.get("remediation", {}) + + _finish(call, { + "findings": report_summary.get("total_findings", 0), + "fixes": remediation.get("fixes_applied", 0), + "status": "complete", + }) + + # Use the fix list built directly inside remediate_full_report (no state-read gap) + pending = result.get("pending_fixes_list", []) + print(f"[blue_service] ingest_red_report: pending_fixes_list has {len(pending)} items") + + return RemediationResult( + target=report.target, + risk_score=report.risk_score, + total_findings=report_summary.get("total_findings", 0), + fixes_applied=remediation.get("fixes_applied", 0), + 
total_steps=remediation.get("total_steps", 0), + severity_counts=report_summary.get("severity_counts", {}), + applied_fixes=remediation.get("applied_fixes", []), + pending_fixes=pending, + status="complete", + ) + + +async def run_sample_remediation() -> RemediationResult: + """Run remediation using the sample Red team report.""" + from red_agent.report_ingester import build_report_from_sample + + sample = build_report_from_sample() + report = RedReportRequest(**sample) + return await ingest_red_report(report) + + +async def get_remediation_status() -> RemediationStatus: + """Get current status of the remediation engine.""" + status = _remediation_engine.get_status() + return RemediationStatus(**status) + + +# ── IDS endpoints ──────────────────────────────────────────────────── + +def get_ids_status() -> dict: + return _ids_engine.get_status() + + +def get_ids_alerts(limit: int = 50) -> list: + return _ids_engine.get_alerts(limit=limit) + + +# ── SIEM endpoints ─────────────────────────────────────────────────── + +def get_siem_report() -> dict: + return _siem_engine.get_report() + + +def get_siem_status() -> dict: + return _siem_engine.get_status() diff --git a/blue_agent/backend/websocket/__init__.py b/blue_agent/backend/websocket/__init__.py new file mode 100644 index 000000000..6a91cc137 --- /dev/null +++ b/blue_agent/backend/websocket/__init__.py @@ -0,0 +1 @@ +"""WebSocket endpoints for the Blue Agent.""" diff --git a/blue_agent/backend/websocket/blue_ws.py b/blue_agent/backend/websocket/blue_ws.py new file mode 100644 index 000000000..c00a32a39 --- /dev/null +++ b/blue_agent/backend/websocket/blue_ws.py @@ -0,0 +1,75 @@ +"""Live log + tool-call WebSocket stream for the Blue dashboard.""" + +from __future__ import annotations + +import asyncio +import uuid +from typing import Set + +from fastapi import APIRouter, WebSocket, WebSocketDisconnect + +from blue_agent.backend.services import blue_service + +router = APIRouter() + + +class BlueConnectionManager: + 
def __init__(self) -> None: + self._connections: Set[WebSocket] = set() + self._lock = asyncio.Lock() + + async def connect(self, ws: WebSocket) -> None: + await ws.accept() + async with self._lock: + self._connections.add(ws) + + async def disconnect(self, ws: WebSocket) -> None: + async with self._lock: + self._connections.discard(ws) + + async def broadcast(self, payload: dict) -> None: + async with self._lock: + stale: list[WebSocket] = [] + for ws in self._connections: + try: + await ws.send_json(payload) + except Exception: + stale.append(ws) + for ws in stale: + self._connections.discard(ws) + + +manager = BlueConnectionManager() + +# Register the broadcast callback so service layer can push events in real-time +blue_service.set_broadcast_callback(manager.broadcast) + + +@router.websocket("/ws/blue") +async def blue_log_stream(ws: WebSocket) -> None: + """Streams {type, payload} envelopes to the Blue dashboard in real-time.""" + await manager.connect(ws) + try: + # Wipe backend history so reload always starts clean + blue_service.clear_history() + # Signal the frontend to clear all existing state (fresh session) + await ws.send_json({"type": "session_start", "payload": {"session_id": str(uuid.uuid4())}}) + + # Periodic status updates + tick = 0 + while True: + await asyncio.sleep(5) + tick += 1 + + status = await blue_service.get_agent_status() + await ws.send_json({"type": "agent_status", "payload": status.model_dump(mode="json")}) + + if tick % 3 == 0: + scan_stats = blue_service.get_ssh_scan_stats() + await ws.send_json({"type": "scan_stats", "payload": scan_stats}) + await ws.send_json({"type": "heartbeat", "payload": {}}) + + except (WebSocketDisconnect, RuntimeError): + pass + finally: + await manager.disconnect(ws) diff --git a/blue_agent/blue_controller.py b/blue_agent/blue_controller.py new file mode 100644 index 000000000..b4e9ad43a --- /dev/null +++ b/blue_agent/blue_controller.py @@ -0,0 +1,274 @@ +"""Top-level orchestrator for the Blue Agent. 
+ +Responsibilities: + 1. Start the EventBus worker. + 2. Register all event subscriptions (response_engine, isolator, auto_patcher, + defense_planner, defense_evolver). + 3. Launch all subsystem loops concurrently via asyncio.gather(): + - 3 detector loops (intrusion, anomaly, log monitor) + - Asset scanner (continuous version + CVE scanning) + - Environment manager (cloud + onprem + hybrid monitoring) + - Defense evolver (continuous learning loop) + 4. Emit blue_ready when everything is live. + 5. Expose get_status() with live counters for the FastAPI / WebSocket layer. + +Concurrency guarantee: + - All loops run in parallel — detection never waits for scanning or patching. + - The full detect → respond → patch chain completes in under 3 seconds. + - Asset scanning and environment monitoring run independently. + - The evolver adapts parameters across all subsystems continuously. + +Coverage: + - Cloud, On-Premise, and Hybrid environments monitored simultaneously. + - Web servers, databases, applications, frontends, system services scanned. + +Continuous operation: + - No periodic scheduling — all loops run continuously until stop(). + - Scan intervals tighten automatically under active threat (via evolver). 
+""" + +import asyncio +import logging +from datetime import datetime +from typing import Any, Dict + +from core.event_bus import event_bus +from blue_agent.detector.intrusion_detector import IntrusionDetector +from blue_agent.detector.anomaly_detector import AnomalyDetector +from blue_agent.detector.log_monitor import LogMonitor +from blue_agent.responder.response_engine import ResponseEngine +from blue_agent.responder.isolator import Isolator +from blue_agent.patcher.auto_patcher import AutoPatcher +from blue_agent.scanner.asset_scanner import AssetScanner +from blue_agent.environment.environment_manager import EnvironmentManager +from blue_agent.strategy.defense_planner import DefensePlanner +from blue_agent.strategy.defense_evolver import DefenseEvolver +from blue_agent.remediation.remediation_engine import RemediationEngine + +logger = logging.getLogger(__name__) + + +def _ts() -> str: + return datetime.now().strftime("%H:%M:%S") + + +class BlueController: + """Orchestrates all Blue Agent subsystems autonomously. + + Usage:: + + controller = BlueController() + await controller.start() # blocks — runs until stop() is called + + get_status() can be called at any time from an external coroutine + (e.g. the FastAPI service layer) to retrieve live counters. 
+ """ + + def __init__(self) -> None: + # ── Detector layer ──────────────────────────────────────────── + self.intrusion_detector = IntrusionDetector() + self.anomaly_detector = AnomalyDetector() + self.log_monitor = LogMonitor() + + # ── Responder layer ─────────────────────────────────────────── + self.response_engine = ResponseEngine() + self.isolator = Isolator() + + # ── Patcher layer ───────────────────────────────────────────── + self.auto_patcher = AutoPatcher() + + # ── Scanner layer (NEW) ─────────────────────────────────────── + self.asset_scanner = AssetScanner() + + # ── Environment monitoring (NEW) ────────────────────────────── + self.environment_manager = EnvironmentManager() + + # ── Strategy layer (NEW — fully implemented) ────────────────── + self.defense_planner = DefensePlanner() + self.defense_evolver = DefenseEvolver() + + # ── Remediation layer (Red report → simultaneous fixes) ─────── + self.remediation_engine = RemediationEngine() + + self._running: bool = False + + # ------------------------------------------------------------------ + # Subscription wiring + # ------------------------------------------------------------------ + + def _wire_subscriptions(self) -> None: + """Register every subsystem's event subscriptions before loops start. + + Subscription order matters for the detect → respond → patch chain: + 1. ResponseEngine subscribes to all detection events. + 2. Isolator subscribes to exploit_attempted + anomaly_detected. + 3. AutoPatcher subscribes to response_complete + vulnerability_found. + 4. DefensePlanner subscribes to vulnerability_found + environment_alert. + 5. DefenseEvolver subscribes to all terminal events for learning. 
+ """ + self.response_engine.register() + self.isolator.register() + self.auto_patcher.register() + self.defense_planner.register() + self.defense_evolver.register() + self.remediation_engine.register() + + # ------------------------------------------------------------------ + # Status + # ------------------------------------------------------------------ + + def get_status(self) -> Dict[str, Any]: + """Return live operational counters for dashboards and health checks.""" + total_detections = ( + self.intrusion_detector.detection_count + + self.anomaly_detector.detection_count + + self.log_monitor.detection_count + ) + return { + "running": self._running, + # Detection + "detection_count": total_detections, + "response_count": self.response_engine.response_count, + "patch_count": self.auto_patcher.patch_count, + "cve_fix_count": self.auto_patcher.cve_fix_count, + "isolation_count": self.isolator.isolation_count, + # Scanning + "scan_cycles": self.asset_scanner.scan_count, + "assets_discovered": self.asset_scanner.asset_count, + "vulnerable_assets": self.asset_scanner.vulnerable_count, + "total_vulnerabilities": self.asset_scanner.total_vulnerabilities, + # Environment monitoring + "environment_alerts": self.environment_manager.alert_count, + # Evolution + "evolution_rounds": self.defense_evolver.evolution_count, + "defense_plans": self.defense_planner.plans_generated, + # Remediation (Red report → Blue fix pipeline) + "remediation_findings": self.remediation_engine.findings_received, + "remediation_fixes": self.remediation_engine.fixes_dispatched, + } + + # ------------------------------------------------------------------ + # Lifecycle + # ------------------------------------------------------------------ + + async def start(self) -> None: + """Initialise and start all subsystems. + + Steps: + 1. Start EventBus worker. + 2. Wire all event subscriptions. + 3. Emit blue_ready. + 4. 
Launch ALL loops concurrently (asyncio.gather): + - 3 detector loops + - Asset scanner (continuous) + - Environment manager (cloud + onprem + hybrid) + - Defense evolver (continuous learning) + + This coroutine blocks until all loops exit (i.e. stop() is called). + """ + ts = _ts() + print(f"{ts} < blue_controller: Initialising Blue Agent subsystems...") + + # 1. Event bus must be running before any subscriptions fire + await event_bus.start() + + # 2. Wire subscriptions — must happen before detectors start emitting + self._wire_subscriptions() + self._running = True + + ts = _ts() + print( + f"{ts} < blue_controller: Event bus live \u2014 " + f"response_engine, isolator, auto_patcher, planner, evolver subscribed" + ) + print( + f"{ts} < blue_controller: Launching continuous loops: " + f"detection(3) + asset_scanner + env_manager(3) + evolver" + ) + + # 3. Announce readiness + await event_bus.emit("blue_ready", { + "message": "Blue Agent fully operational \u2014 continuous defense active (target: 172.25.8.172:5000)", + "subsystems": [ + "intrusion_detector", + "anomaly_detector", + "log_monitor", + "response_engine", + "isolator", + "auto_patcher", + "asset_scanner", + "environment_manager", + "defense_planner", + "defense_evolver", + "remediation_engine", + ], + "environments": ["cloud", "onprem", "hybrid"], + "primary_target": { + "ip": "172.25.8.172", + "port": 5000, + "service": "Flask/Werkzeug 3.1.8", + "endpoints": ["/login", "/search", "/profile"], + "attack_vectors": [ + "credential_bruteforce", + "sql_injection", + "directory_traversal", + "idor", + ], + }, + }) + + ts = _ts() + print( + f"{ts} < blue_controller: \u2588 BLUE AGENT ONLINE \u2588 " + f"Real-time detection, scanning, response, patching, and evolution ACTIVE" + ) + print( + f"{ts} < blue_controller: Monitoring: Cloud + On-Premise + Hybrid environments" + ) + print( + f"{ts} < blue_controller: Primary target: Flask/Werkzeug 3.1.8 @ 172.25.8.172:5000" + ) + print( + f"{ts} < blue_controller: 
Defending endpoints: /login /search /profile " + f"(SQLi, brute-force, IDOR, traversal)" + ) + + # 4. Run ALL loops concurrently — none blocks the others. + results = await asyncio.gather( + # Detection loops + self.intrusion_detector.start(), + self.anomaly_detector.start(), + self.log_monitor.start(), + # Asset scanning (continuous version + CVE scanning) + self.asset_scanner.start(), + # Environment monitoring (cloud + onprem + hybrid) + self.environment_manager.start(), + # Defensive evolution (continuous learning) + self.defense_evolver.start(), + return_exceptions=True, + ) + + # Log any unexpected loop exits + loop_names = [ + "intrusion_detector", "anomaly_detector", "log_monitor", + "asset_scanner", "environment_manager", "defense_evolver", + ] + for name, result in zip(loop_names, results): + if isinstance(result, Exception): + logger.error(f"BlueController: {name} exited with error: {result}") + + async def stop(self) -> None: + """Gracefully stop all loops and the event bus.""" + self._running = False + await asyncio.gather( + self.intrusion_detector.stop(), + self.anomaly_detector.stop(), + self.log_monitor.stop(), + self.asset_scanner.stop(), + self.environment_manager.stop(), + self.defense_evolver.stop(), + return_exceptions=True, + ) + await event_bus.stop() + ts = _ts() + print(f"{ts} < blue_controller: Blue Agent stopped \u2014 all subsystems offline") diff --git a/blue_agent/detector/__init__.py b/blue_agent/detector/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/blue_agent/detector/anomaly_detector.py b/blue_agent/detector/anomaly_detector.py new file mode 100644 index 000000000..bfc56477b --- /dev/null +++ b/blue_agent/detector/anomaly_detector.py @@ -0,0 +1,283 @@ +"""Real-Time Detection (Feature 1) — Unusual traffic patterns and service behaviour. + +Runs a continuous asyncio polling loop every 1 second. 
+
+Flags anomalies when:
+  - More than 5 port scans per second are observed
+  - Unexpected access occurs on sensitive ports (3306, 21, 23, 22, 5000)
+  - A sudden traffic spike hits any single port
+
+Emits anomaly_detected events via the event bus the instant a threshold
+is crossed. Runs concurrently alongside intrusion_detector and log_monitor
+and never blocks either of them.
+"""
+
+import asyncio
+import logging
+import random
+from collections import deque
+from datetime import datetime
+from typing import Deque, Dict
+
+from core.event_bus import event_bus
+
+logger = logging.getLogger(__name__)
+
+TARGET_IP = "172.25.8.172"
+SENSITIVE_PORTS: set = {3306, 21, 23, 22, 5000}
+SCAN_RATE_THRESHOLD = 5  # scans/second that trigger anomaly_detected
+SPIKE_THRESHOLD = 8  # per-port hits/second that trigger a spike alert
+SENSITIVE_ACCESS_CHANCE = 0.35  # probability per tick that a sensitive port is hit
+
+# Web app anomaly thresholds (Flask @ port 5000)
+LOGIN_ATTEMPT_THRESHOLD = 5  # failed logins/sec triggers credential_attack_detected
+SQLI_PATTERN_CHANCE = 0.30  # probability of SQLi pattern per tick
+PARAM_FUZZ_THRESHOLD = 10  # parameter variations/sec triggers anomaly
+
+
+def _ts() -> str:
+    return datetime.now().strftime("%H:%M:%S")
+
+
+class AnomalyDetector:
+    """Monitors simulated traffic metrics for behavioural anomalies.
+ + Emits: + anomaly_detected — with type "scan_rate", "sensitive_port", or "traffic_spike" + """ + + def __init__(self) -> None: + # Rolling window of scan timestamps for rate calculation + self._scan_window: Deque[float] = deque(maxlen=200) + # Per-port hit counters for the current second + self._port_hits: Dict[int, int] = {} + self._running: bool = False + self.detection_count: int = 0 + # Web app anomaly tracking + self._login_attempts: Deque[float] = deque(maxlen=100) + self._param_variations: Deque[float] = deque(maxlen=200) + + # ------------------------------------------------------------------ + # Simulation helpers + # ------------------------------------------------------------------ + + def _simulate_tick(self, now: float) -> Dict: + """Produce simulated traffic metrics for a single 1-second tick.""" + scans_this_tick = random.randint(0, 12) + + # Record each scan in the rolling window + for _ in range(scans_this_tick): + self._scan_window.append(now) + + # Count scans that occurred within the last 1 second + scans_per_second = sum(1 for t in self._scan_window if now - t <= 1.0) + + # Pick a random port that Red is probing this tick + probed_port = random.choice( + [21, 22, 23, 80, 443, 3306, 5432, 8080] + ) + source_ip = f"10.0.0.{random.randint(2, 254)}" + + # Increment per-port hit counter + self._port_hits[probed_port] = self._port_hits.get(probed_port, 0) + scans_this_tick + + return { + "scans_per_second": scans_per_second, + "probed_port": probed_port, + "source_ip": source_ip, + "port_hits": self._port_hits.copy(), + } + + # ------------------------------------------------------------------ + # Detection loop + # ------------------------------------------------------------------ + + async def _detection_loop(self) -> None: + """Main loop — ticks every 1 second. 
Non-blocking.""" + while self._running: + try: + now = asyncio.get_event_loop().time() + metrics = self._simulate_tick(now) + ts = _ts() + + # ── Rule 1: Scan rate threshold ────────────────────────── + if metrics["scans_per_second"] > SCAN_RATE_THRESHOLD: + rate = metrics["scans_per_second"] + src = metrics["source_ip"] + print( + f"{ts} < anomaly_detector: Scan rate {rate}/s exceeds " + f"threshold ({SCAN_RATE_THRESHOLD}/s) from {src}" + ) + print( + f'{ts} > event_bus.emit("anomaly_detected", ' + f'{{"type": "scan_rate", "rate": {rate}, ' + f'"source_ip": "{src}"}})' + ) + self.detection_count += 1 + await event_bus.emit("anomaly_detected", { + "type": "scan_rate", + "rate": rate, + "source_ip": src, + "target": TARGET_IP, + }) + + # ── Rule 2: Unexpected access on sensitive port ─────────── + port = metrics["probed_port"] + if port in SENSITIVE_PORTS and random.random() < SENSITIVE_ACCESS_CHANCE: + src = metrics["source_ip"] + ts = _ts() + print( + f"{ts} < anomaly_detector: Unexpected access on " + f"sensitive port {port} from {src}" + ) + print( + f'{ts} > event_bus.emit("anomaly_detected", ' + f'{{"type": "sensitive_port", "port": {port}, ' + f'"source_ip": "{src}"}})' + ) + self.detection_count += 1 + await event_bus.emit("anomaly_detected", { + "type": "sensitive_port", + "port": port, + "source_ip": src, + "target": TARGET_IP, + }) + + # ── Rule 3: Per-port traffic spike ─────────────────────── + for p, hits in metrics["port_hits"].items(): + if hits > SPIKE_THRESHOLD: + src = metrics["source_ip"] + ts = _ts() + print( + f"{ts} < anomaly_detector: Traffic spike on port {p} " + f"— {hits} hits detected" + ) + print( + f'{ts} > event_bus.emit("anomaly_detected", ' + f'{{"type": "traffic_spike", "port": {p}, ' + f'"hits": {hits}, "source_ip": "{src}"}})' + ) + self.detection_count += 1 + await event_bus.emit("anomaly_detected", { + "type": "traffic_spike", + "port": p, + "hits": hits, + "source_ip": src, + "target": TARGET_IP, + }) + # Reset after alerting 
so we don't spam the same spike + self._port_hits[p] = 0 + + # ── Rule 4: Login brute-force on Flask /login ──────────── + login_burst = random.randint(0, 8) + for _ in range(login_burst): + self._login_attempts.append(now) + logins_per_sec = sum(1 for t in self._login_attempts if now - t <= 1.0) + if logins_per_sec > LOGIN_ATTEMPT_THRESHOLD: + src = metrics["source_ip"] + ts = _ts() + print( + f"{ts} < anomaly_detector: Credential brute-force detected " + f"— {logins_per_sec} login attempts/s on /login from {src}" + ) + self.detection_count += 1 + await event_bus.emit("credential_attack_detected", { + "type": "login_bruteforce", + "endpoint": "/login", + "attempts_per_sec": logins_per_sec, + "port": 5000, + "service": "flask", + "source_ip": src, + "target": TARGET_IP, + }) + + # ── Rule 5: SQL injection pattern on Flask /search ─────── + if random.random() < SQLI_PATTERN_CHANCE: + src = metrics["source_ip"] + sqli_payloads = [ + "' OR 1=1--", "'; DROP TABLE users;--", + "' UNION SELECT * FROM users--", "1' AND '1'='1", + "admin'--", "' OR ''='", + ] + payload = random.choice(sqli_payloads) + ts = _ts() + print( + f"{ts} < anomaly_detector: SQLi pattern detected " + f"on /search?q={payload} from {src}" + ) + self.detection_count += 1 + await event_bus.emit("sql_injection_attempted", { + "type": "sqli_pattern", + "endpoint": "/search", + "payload": payload, + "port": 5000, + "service": "flask", + "source_ip": src, + "target": TARGET_IP, + }) + + # ── Rule 6: IDOR / Directory traversal on /profile ─────── + if random.random() < 0.25: + src = metrics["source_ip"] + traversal_payloads = [ + "../../etc/passwd", "../../../etc/shadow", + "....//....//etc/passwd", "%2e%2e%2fetc%2fpasswd", + ] + idor_ids = ["-1", "0", "999999", "1 OR 1=1"] + if random.random() < 0.5: + payload = random.choice(traversal_payloads) + ts = _ts() + print( + f"{ts} < anomaly_detector: Directory traversal attempt " + f"on /profile?id={payload} from {src}" + ) + self.detection_count += 1 + await 
event_bus.emit("directory_traversal_attempted", { + "type": "path_traversal", + "endpoint": "/profile", + "payload": payload, + "port": 5000, + "service": "flask", + "source_ip": src, + "target": TARGET_IP, + }) + else: + idor_id = random.choice(idor_ids) + ts = _ts() + print( + f"{ts} < anomaly_detector: IDOR attempt " + f"on /profile?id={idor_id} from {src}" + ) + self.detection_count += 1 + await event_bus.emit("idor_attempted", { + "type": "idor", + "endpoint": "/profile", + "payload": idor_id, + "port": 5000, + "service": "flask", + "source_ip": src, + "target": TARGET_IP, + }) + + except Exception as exc: + logger.error(f"AnomalyDetector error: {exc}") + + await asyncio.sleep(1) + + # ------------------------------------------------------------------ + # Lifecycle + # ------------------------------------------------------------------ + + async def start(self) -> None: + """Start the anomaly detection loop (runs until stop() is called).""" + self._running = True + ts = _ts() + print( + f"{ts} < anomaly_detector: Anomaly detection loop started " + f"— threshold {SCAN_RATE_THRESHOLD} scans/s, " + f"sensitive ports {sorted(SENSITIVE_PORTS)}" + ) + await self._detection_loop() + + async def stop(self) -> None: + """Signal the detection loop to exit on the next tick.""" + self._running = False diff --git a/blue_agent/detector/intrusion_detector.py b/blue_agent/detector/intrusion_detector.py new file mode 100644 index 000000000..58c05be52 --- /dev/null +++ b/blue_agent/detector/intrusion_detector.py @@ -0,0 +1,181 @@ +"""Real-Time Detection (Feature 1) — Port scans and active probes. + +Runs a continuous asyncio polling loop every 1 second watching the target +system for new port probes. Emits port_probed (and port_scanned for +sensitive ports) events via the event bus the moment a probe is detected. + +Never blocks — the detection loop is a standalone coroutine that runs +concurrently alongside anomaly_detector and log_monitor. 
+"""
+
+import asyncio
+import logging
+import random
+from datetime import datetime
+from typing import Set
+
+from core.event_bus import event_bus
+
+logger = logging.getLogger(__name__)
+
+TARGET_IP = "172.25.8.172"
+
+# Ports exposed by the simulated target system
+TARGET_PORTS = [21, 22, 23, 80, 443, 3306, 5000, 8080, 8443, 3389, 5432]
+
+# Sensitive ports that also trigger a port_scanned event (nmap-style sweep)
+SENSITIVE_PORTS = {21, 22, 23, 3306, 5000, 5432}
+
+# Chance Red probes a port on any given tick (70%)
+PROBE_PROBABILITY = 0.70
+
+# Flask/Werkzeug web app endpoints discovered on port 5000
+WEBAPP_ENDPOINTS = [
+    {"path": "/login", "method": "POST", "risk": "high", "attack": "credential_bruteforce"},
+    {"path": "/search?q=Widget", "method": "GET", "risk": "medium", "attack": "sql_injection"},
+    {"path": "/search?q=' OR 1=1--", "method": "GET", "risk": "high", "attack": "sql_injection"},
+    {"path": "/profile?id=1", "method": "GET", "risk": "medium", "attack": "idor"},
+    {"path": "/profile?id=../../etc/passwd", "method": "GET", "risk": "high", "attack": "directory_traversal"},
+]
+
+# Chance Red targets a web endpoint on a given tick (55%)
+WEBAPP_PROBE_PROBABILITY = 0.55
+
+
+def _ts() -> str:
+    return datetime.now().strftime("%H:%M:%S")
+
+
+class IntrusionDetector:
+    """Continuously polls for new port probes on the target system.
+
+    Emits:
+        port_probed — for every detected probe
+        port_scanned — additionally for sensitive ports (21, 22, 23, 3306, 5000, 5432)
+    """
+
+    def __init__(self) -> None:
+        self._running: bool = False
+        self.detection_count: int = 0
+
+    # ------------------------------------------------------------------
+    # Internal helpers
+    # ------------------------------------------------------------------
+
+    def _simulate_probe(self) -> "tuple[int, str] | None":
+        """Simulate Red agent probing the target.
+
+        Returns (port, protocol) with PROBE_PROBABILITY, else None.
+ """ + if random.random() < PROBE_PROBABILITY: + port = random.choice(TARGET_PORTS) + protocol = "udp" if port == 53 else "tcp" + return port, protocol + return None + + # ------------------------------------------------------------------ + # Detection loop + # ------------------------------------------------------------------ + + async def _detection_loop(self) -> None: + """Main loop — ticks every 1 second. Never blocks other loops.""" + while self._running: + try: + result = self._simulate_probe() + if result is not None: + port, protocol = result + source_ip = f"10.0.0.{random.randint(2, 254)}" + ts = _ts() + + # Log detection + print(f"{ts} < intrusion_detector: Port {port} probe detected") + print( + f'{ts} > event_bus.emit("port_probed", ' + f'{{"port": {port}, "protocol": "{protocol}"}})' + ) + + self.detection_count += 1 + await event_bus.emit("port_probed", { + "port": port, + "protocol": protocol, + "source_ip": source_ip, + "target": TARGET_IP, + }) + + # Sensitive ports also fire port_scanned (nmap sweep behaviour) + if port in SENSITIVE_PORTS: + ts = _ts() + print( + f"{ts} < intrusion_detector: " + f"Port {port} is sensitive — escalating to port_scanned" + ) + print( + f'{ts} > event_bus.emit("port_scanned", ' + f'{{"port": {port}, "protocol": "{protocol}"}})' + ) + await event_bus.emit("port_scanned", { + "port": port, + "protocol": protocol, + "source_ip": source_ip, + "target": TARGET_IP, + }) + + # ── Web application endpoint probing (Flask @ port 5000) ── + if random.random() < WEBAPP_PROBE_PROBABILITY: + endpoint = random.choice(WEBAPP_ENDPOINTS) + source_ip = f"10.0.0.{random.randint(2, 254)}" + ts = _ts() + + attack_type = endpoint["attack"] + event_map = { + "sql_injection": "sql_injection_attempted", + "credential_bruteforce": "credential_attack_detected", + "directory_traversal": "directory_traversal_attempted", + "idor": "idor_attempted", + } + event_type = event_map.get(attack_type, "webapp_attack_detected") + + print( + f"{ts} < 
intrusion_detector: Web attack on " + f"http://{TARGET_IP}:5000{endpoint['path']} " + f"({attack_type}, risk={endpoint['risk']})" + ) + print( + f'{ts} > event_bus.emit("{event_type}", ' + f'{{"endpoint": "{endpoint["path"]}", "attack": "{attack_type}"}})' + ) + + self.detection_count += 1 + await event_bus.emit(event_type, { + "endpoint": endpoint["path"], + "method": endpoint["method"], + "attack_type": attack_type, + "risk": endpoint["risk"], + "port": 5000, + "service": "flask", + "source_ip": source_ip, + "target": TARGET_IP, + }) + + except Exception as exc: + logger.error(f"IntrusionDetector error: {exc}") + + await asyncio.sleep(1) + + # ------------------------------------------------------------------ + # Lifecycle + # ------------------------------------------------------------------ + + async def start(self) -> None: + """Start the detection loop (runs until stop() is called).""" + self._running = True + ts = _ts() + print( + f"{ts} < intrusion_detector: Detection loop started " + f"— watching {TARGET_IP}" + ) + await self._detection_loop() + + async def stop(self) -> None: + """Signal the detection loop to exit on the next tick.""" + self._running = False diff --git a/blue_agent/detector/log_monitor.py b/blue_agent/detector/log_monitor.py new file mode 100644 index 000000000..ccb6eac99 --- /dev/null +++ b/blue_agent/detector/log_monitor.py @@ -0,0 +1,240 @@ +"""Real-Time Detection (Feature 1) — Continuously tail system logs for Red signatures. + +Maintains an internal rotating log buffer (simulating /var/log/syslog or +auth.log) that is injected with realistic Red-agent entries every 1.5 seconds. +A separate tail loop processes new lines every 1 second and pattern-matches +against known Red signatures. 
+
+Signature → event mapping:
+    nmap pattern found        → port_scanned
+    CVE lookup pattern found  → cve_detected
+    Exploit string found      → exploit_attempted
+    SQLi tool pattern found   → sql_injection_attempted
+    Brute-force pattern found → credential_attack_detected
+    Traversal pattern found   → directory_traversal_attempted
+    IDOR pattern found        → idor_attempted
+
+Both loops run as asyncio coroutines — neither blocks the other or the
+intrusion / anomaly detectors.
+"""
+
+import asyncio
+import logging
+import random
+import re
+from collections import deque
+from datetime import datetime
+from typing import Deque, List, Tuple
+
+from core.event_bus import event_bus
+
+logger = logging.getLogger(__name__)
+
+TARGET_IP = "172.25.8.172"
+
+# ---------------------------------------------------------------------------
+# Simulated Red-agent log templates
+# Each entry is (template_string, signature_category)
+# ---------------------------------------------------------------------------
+RED_LOG_TEMPLATES: List[Tuple[str, str]] = [
+    # nmap patterns → port_scanned
+    ("nmap -sV -p {port} {target}", "nmap"),
+    ("nmap -sS --open -T4 {target}", "nmap"),
+    ("nmap -A -p- {target}", "nmap"),
+    ("nmap -sU --top-ports 100 {target}", "nmap"),
+    # CVE lookup patterns → cve_detected
+    ("searchsploit CVE-{year}-{cve_id}", "cve_lookup"),
+    ("curl https://nvd.nist.gov/vuln/detail/CVE-{year}-{cve_id}", "cve_lookup"),
+    ("python3 cve_check.py --id CVE-{year}-{cve_id} --target {target}", "cve_lookup"),
+    # Exploit strings → exploit_attempted
+    ("msfconsole -x 'use exploit/multi/handler; set LHOST {target}; run'", "exploit"),
+    ("python3 exploit_{service}.py --target {target} --port {port}", "exploit"),
+    ("hydra -l admin -P /usr/share/wordlists/rockyou.txt {target} {service}", "exploit"),
+    ("./exploit.sh --rhost {target} --rport {port} --payload reverse_shell", "exploit"),
+    # SQL injection attacks on Flask /search endpoint → sql_injection
+    ("sqlmap -u http://{target}:5000/search?q=test --dbs --level=5 --risk=3", "sql_injection"),
+    ("sqlmap -u http://{target}:5000/search?q=Widget --batch --dump", "sql_injection"),
+    ("curl 'http://{target}:5000/search?q=%27+OR+1%3D1--'", "sql_injection"),
("python3 sqli_exploit.py --url http://{target}:5000/search --param q", "sql_injection"), + # Credential brute-force on Flask /login → credential_attack + ("hydra -l admin -P /usr/share/wordlists/rockyou.txt {target} http-post-form '/login:username=^USER^&password=^PASS^:Invalid'", "credential_attack"), + ("python3 brute_login.py --url http://{target}:5000/login --wordlist rockyou.txt", "credential_attack"), + ("curl -X POST http://{target}:5000/login -d 'username=admin&password=admin123'", "credential_attack"), + ("wfuzz -z file,passwords.txt -d 'username=admin&password=FUZZ' http://{target}:5000/login", "credential_attack"), + # Directory traversal on Flask /profile → directory_traversal + ("curl 'http://{target}:5000/profile?id=../../etc/passwd'", "directory_traversal"), + ("curl 'http://{target}:5000/profile?id=....//....//etc/shadow'", "directory_traversal"), + ("dotdotpwn -m http -h {target} -x 5000 -f /etc/passwd -k root", "directory_traversal"), + # IDOR on Flask /profile → idor + ("python3 idor_enum.py --url http://{target}:5000/profile --param id --range 1-1000", "idor"), + ("curl 'http://{target}:5000/profile?id=2' -H 'Cookie: session=user1_token'", "idor"), +] + +# Signature category → event type +SIGNATURE_TO_EVENT = { + "nmap": "port_scanned", + "cve_lookup": "cve_detected", + "exploit": "exploit_attempted", + "sql_injection": "sql_injection_attempted", + "credential_attack": "credential_attack_detected", + "directory_traversal": "directory_traversal_attempted", + "idor": "idor_attempted", +} + +# Regex to extract CVE IDs from log lines +CVE_REGEX = re.compile(r"CVE-(\d{4})-(\d+)") + +PORT_SERVICE_MAP = { + 21: "ftp", + 22: "ssh", + 23: "telnet", + 80: "http", + 443: "https", + 3306: "mysql", + 5000: "flask", + 8080: "http", + 5432: "postgresql", +} + + +def _ts() -> str: + return datetime.now().strftime("%H:%M:%S") + + +def _render_template(template: str) -> str: + """Fill a log template with random but plausible values.""" + port = 
random.choice(list(PORT_SERVICE_MAP.keys())) + service = PORT_SERVICE_MAP.get(port, "http") + return template.format( + port=port, + target=TARGET_IP, + year=random.randint(2020, 2024), + cve_id=random.randint(10000, 99999), + service=service, + ) + + +class LogMonitor: + """Tails an internal simulated log buffer and pattern-matches Red signatures. + + Emits: + port_scanned — when an nmap pattern is found + cve_detected — when a CVE lookup pattern is found + exploit_attempted — when an exploit string is found + """ + + def __init__(self) -> None: + self._log_buffer: Deque[Tuple[str, str]] = deque(maxlen=500) + # (log_line, signature_category) + self._running: bool = False + self._inject_task: "asyncio.Task | None" = None + self._cursor: int = 0 # how many buffer entries have been processed + self.detection_count: int = 0 + + # ------------------------------------------------------------------ + # Log injection (simulates Red agent writing to system logs) + # ------------------------------------------------------------------ + + async def _inject_logs(self) -> None: + """Inject 1–3 Red log entries into the buffer every 1.5 seconds.""" + while self._running: + count = random.randint(1, 3) + for _ in range(count): + template, sig_type = random.choice(RED_LOG_TEMPLATES) + line = _render_template(template) + timestamped = f"{_ts()} {line}" + self._log_buffer.append((timestamped, sig_type)) + await asyncio.sleep(1.5) + + # ------------------------------------------------------------------ + # Log tailing (processes new buffer entries, matches signatures) + # ------------------------------------------------------------------ + + def _extract_context(self, line: str) -> dict: + """Pull port, service, CVE, and source_ip from a log line.""" + ctx: dict = {"target": TARGET_IP, "source_ip": f"10.0.0.{random.randint(2, 254)}"} + + # Port + for p in sorted(PORT_SERVICE_MAP.keys(), reverse=True): + if str(p) in line: + ctx["port"] = p + ctx["service"] = PORT_SERVICE_MAP[p] + 
break + + # Service keyword fallback + if "service" not in ctx: + for svc in PORT_SERVICE_MAP.values(): + if svc in line.lower(): + ctx["service"] = svc + break + + # CVE + cve_match = CVE_REGEX.search(line) + if cve_match: + ctx["cve_id"] = f"CVE-{cve_match.group(1)}-{cve_match.group(2)}" + ctx["service_name"] = ctx.get("service", "unknown") + + return ctx + + async def _tail_loop(self) -> None: + """Process new log buffer entries every 1 second. Non-blocking.""" + while self._running: + try: + buffer_snapshot = list(self._log_buffer) + new_entries = buffer_snapshot[self._cursor:] + self._cursor = len(buffer_snapshot) + + for line, sig_type in new_entries: + event_type = SIGNATURE_TO_EVENT.get(sig_type) + if not event_type: + continue + + ctx = self._extract_context(line) + ts = _ts() + + label_map = { + "port_scanned": "nmap pattern", + "cve_detected": "CVE lookup pattern", + "exploit_attempted": "exploit string", + } + label = label_map.get(event_type, sig_type) + + print( + f"{ts} < log_monitor: {label} found in logs " + f"→ emitting {event_type}" + ) + print( + f'{ts} > event_bus.emit("{event_type}", {ctx})' + ) + + self.detection_count += 1 + await event_bus.emit(event_type, ctx) + + except Exception as exc: + logger.error(f"LogMonitor tail error: {exc}") + + await asyncio.sleep(1) + + # ------------------------------------------------------------------ + # Lifecycle + # ------------------------------------------------------------------ + + async def start(self) -> None: + """Start log injection and tailing concurrently.""" + self._running = True + ts = _ts() + print( + f"{ts} < log_monitor: Log monitoring started " + f"— tailing internal buffer for Red signatures" + ) + # Run injection as a background task so the tail loop can await + self._inject_task = asyncio.create_task( + self._inject_logs(), name="log_injector" + ) + await self._tail_loop() + + async def stop(self) -> None: + """Stop both the tail loop and the injection task.""" + self._running = False 
+ if self._inject_task and not self._inject_task.done(): + self._inject_task.cancel() + try: + await self._inject_task + except asyncio.CancelledError: + pass diff --git a/blue_agent/environment/__init__.py b/blue_agent/environment/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/blue_agent/environment/environment_manager.py b/blue_agent/environment/environment_manager.py new file mode 100644 index 000000000..33c320a47 --- /dev/null +++ b/blue_agent/environment/environment_manager.py @@ -0,0 +1,391 @@ +from __future__ import annotations + +"""Multi-environment monitoring — Cloud, On-Premise, and Hybrid. + +Runs three parallel monitoring loops, one per environment class. +Each loop checks environment-specific security controls and emits +alerts when misconfigurations or policy violations are detected. + +Cloud monitoring checks: + - Public S3 buckets / storage exposure + - Overly permissive security groups / firewall rules + - IAM misconfigurations (wildcard policies, no MFA) + - Unencrypted data stores + - Exposed cloud metadata endpoints + +On-Premise monitoring checks: + - Unpatched OS / services + - Weak authentication (default creds, no key-based auth) + - Open management ports (telnet, RDP) + - Missing network segmentation + - Disabled audit logging + +Hybrid monitoring checks: + - VPN / tunnel misconfigurations + - Cross-environment traffic anomalies + - Certificate expiration + - DNS configuration drift + - Inconsistent firewall rules between environments + +All monitoring is simulated — no real infrastructure calls. +Continuous operation: loops never stop until stop() is called. 
+""" + +import asyncio +import logging +import random +import time +from dataclasses import dataclass, field +from datetime import datetime +from typing import Any, Dict, List, Optional, Set + +from core.event_bus import event_bus + +logger = logging.getLogger(__name__) + + +def _ts() -> str: + return datetime.now().strftime("%H:%M:%S") + + +@dataclass +class EnvironmentAlert: + """A security alert from environment monitoring.""" + alert_id: str + environment: str + category: str + severity: str + title: str + description: str + resource: str + recommendation: str + timestamp: float = field(default_factory=time.time) + + def to_dict(self) -> Dict[str, Any]: + return { + "alert_id": self.alert_id, + "environment": self.environment, + "category": self.category, + "severity": self.severity, + "title": self.title, + "description": self.description, + "resource": self.resource, + "recommendation": self.recommendation, + "timestamp": self.timestamp, + } + + +# --------------------------------------------------------------------------- +# Alert templates per environment +# --------------------------------------------------------------------------- + +_CLOUD_ALERTS = [ + { + "category": "storage", + "severity": "critical", + "title": "Public S3 bucket detected", + "description": "Bucket 'app-data-prod' has public read ACL enabled", + "resource": "s3://app-data-prod", + "recommendation": "Remove public ACL; apply bucket policy with explicit deny on s3:GetObject for *", + }, + { + "category": "iam", + "severity": "high", + "title": "IAM policy with wildcard actions", + "description": "Role 'dev-admin' has Action: * on Resource: * — violates least privilege", + "resource": "iam:role/dev-admin", + "recommendation": "Scope IAM policy to specific services and resources; enable MFA for console access", + }, + { + "category": "network", + "severity": "high", + "title": "Security group allows 0.0.0.0/0 on port 22", + "description": "Security group 'sg-webservers' permits SSH from 
any IP", + "resource": "ec2:sg/sg-webservers", + "recommendation": "Restrict SSH to VPN CIDR or bastion host IP only", + }, + { + "category": "encryption", + "severity": "high", + "title": "Unencrypted RDS instance", + "description": "RDS instance 'prod-db' does not have encryption at rest enabled", + "resource": "rds:instance/prod-db", + "recommendation": "Enable encryption at rest; create encrypted snapshot and restore", + }, + { + "category": "metadata", + "severity": "critical", + "title": "Cloud metadata endpoint exposed", + "description": "EC2 instance metadata v1 accessible — SSRF risk for credential theft", + "resource": "ec2:instance/i-0abc123", + "recommendation": "Enforce IMDSv2 (require token); block metadata endpoint from application layer", + }, + { + "category": "logging", + "severity": "medium", + "title": "CloudTrail logging disabled", + "description": "CloudTrail is not enabled in us-west-2 region", + "resource": "cloudtrail:us-west-2", + "recommendation": "Enable CloudTrail with multi-region logging and S3 delivery", + }, +] + +_ONPREM_ALERTS = [ + { + "category": "authentication", + "severity": "critical", + "title": "Default credentials on database", + "description": "MySQL on 192.168.1.13:3306 accepts root login with default password", + "resource": "192.168.1.13:3306/mysql", + "recommendation": "Change root password; disable remote root login; enforce password policy", + }, + { + "category": "patching", + "severity": "high", + "title": "OS kernel outdated", + "description": "Server 192.168.1.10 running kernel 5.4.0 — 47 known CVEs", + "resource": "192.168.1.10/kernel", + "recommendation": "Apply pending kernel updates; schedule reboot for maintenance window", + }, + { + "category": "network", + "severity": "critical", + "title": "Telnet service running", + "description": "Telnet daemon active on 192.168.1.17:23 — cleartext protocol", + "resource": "192.168.1.17:23/telnet", + "recommendation": "Disable telnet; migrate to SSH; block port 23 at 
firewall", + }, + { + "category": "segmentation", + "severity": "high", + "title": "No network segmentation", + "description": "Database subnet 192.168.1.0/24 is directly reachable from DMZ", + "resource": "192.168.1.0/24", + "recommendation": "Implement VLAN segmentation; add firewall rules between zones", + }, + { + "category": "audit", + "severity": "medium", + "title": "Audit logging disabled", + "description": "Server 192.168.1.11 has auditd service stopped", + "resource": "192.168.1.11/auditd", + "recommendation": "Enable and start auditd; configure rules for privileged commands", + }, + { + "category": "authentication", + "severity": "high", + "title": "Password-based SSH enabled", + "description": "SSH on 192.168.1.11:22 allows password authentication", + "resource": "192.168.1.11:22/ssh", + "recommendation": "Set PasswordAuthentication no in sshd_config; enforce key-based auth", + }, +] + +_HYBRID_ALERTS = [ + { + "category": "vpn", + "severity": "high", + "title": "VPN tunnel using deprecated cipher", + "description": "Site-to-site VPN uses 3DES cipher — vulnerable to Sweet32 attack", + "resource": "vpn:tunnel/site-to-cloud", + "recommendation": "Migrate to AES-256-GCM cipher suite; update both endpoints", + }, + { + "category": "certificate", + "severity": "high", + "title": "TLS certificate expiring in 7 days", + "description": "Certificate for *.hybrid.internal expires 2026-04-23", + "resource": "cert:*.hybrid.internal", + "recommendation": "Renew certificate; configure auto-renewal via ACME/Let's Encrypt", + }, + { + "category": "dns", + "severity": "medium", + "title": "DNS configuration drift detected", + "description": "Internal DNS zone differs between cloud and on-prem resolvers", + "resource": "dns:zone/internal", + "recommendation": "Synchronize DNS zones; implement split-horizon DNS properly", + }, + { + "category": "firewall", + "severity": "high", + "title": "Inconsistent firewall rules", + "description": "Cloud security group allows port 
8080 but on-prem firewall blocks it — service unreachable", + "resource": "firewall:cross-env/8080", + "recommendation": "Audit and reconcile firewall rules across environments; use IaC for consistency", + }, + { + "category": "traffic", + "severity": "critical", + "title": "Anomalous cross-environment traffic", + "description": "Unusual data transfer from onprem DB to cloud storage (15GB in 1 hour)", + "resource": "traffic:onprem->cloud", + "recommendation": "Investigate data exfiltration; check backup schedules; review DLP policies", + }, + { + "category": "identity", + "severity": "high", + "title": "Federated identity sync failure", + "description": "LDAP-to-cloud IAM sync failed 3 times — stale credentials may be active", + "resource": "identity:federation", + "recommendation": "Fix LDAP connector; force credential rotation for affected accounts", + }, +] + + +class EnvironmentManager: + """Monitors Cloud, On-Premise, and Hybrid environments continuously. + + Runs three parallel loops, one per environment type. + Emits environment_alert and misconfig_found events. 
+ + Usage:: + + mgr = EnvironmentManager() + await mgr.start() # blocks — runs until stop() + """ + + def __init__(self) -> None: + self._running: bool = False + self.alerts: List[EnvironmentAlert] = [] + self.alert_count: int = 0 + self._alert_counter: int = 0 + self._emitted_alerts: Set[str] = set() + + # Per-environment monitoring intervals (seconds) + self.cloud_interval: float = 6.0 + self.onprem_interval: float = 5.0 + self.hybrid_interval: float = 7.0 + + # ------------------------------------------------------------------ + # Lifecycle + # ------------------------------------------------------------------ + + async def start(self) -> None: + """Start all three environment monitoring loops in parallel.""" + self._running = True + ts = _ts() + print( + f"{ts} < env_manager: Starting multi-environment monitoring " + f"(cloud={self.cloud_interval}s, onprem={self.onprem_interval}s, " + f"hybrid={self.hybrid_interval}s)" + ) + + await asyncio.gather( + self._monitor_cloud(), + self._monitor_onprem(), + self._monitor_hybrid(), + return_exceptions=True, + ) + + async def stop(self) -> None: + self._running = False + + # ------------------------------------------------------------------ + # Environment-specific monitoring loops + # ------------------------------------------------------------------ + + async def _monitor_cloud(self) -> None: + """Continuous cloud environment monitoring.""" + while self._running: + await self._check_environment("cloud", _CLOUD_ALERTS) + await asyncio.sleep(self.cloud_interval) + + async def _monitor_onprem(self) -> None: + """Continuous on-premise environment monitoring.""" + while self._running: + await self._check_environment("onprem", _ONPREM_ALERTS) + await asyncio.sleep(self.onprem_interval) + + async def _monitor_hybrid(self) -> None: + """Continuous hybrid environment monitoring.""" + while self._running: + await self._check_environment("hybrid", _HYBRID_ALERTS) + await asyncio.sleep(self.hybrid_interval) + + async def 
_check_environment( + self, env_name: str, alert_templates: List[Dict[str, Any]] + ) -> None: + """Check one environment for issues. Probabilistically triggers alerts.""" + for template in alert_templates: + # Each check has a probability of firing per cycle + # Higher severity = more likely to fire (simulates persistent issues) + threshold = {"critical": 0.30, "high": 0.25, "medium": 0.15}.get( + template["severity"], 0.10 + ) + + if random.random() > threshold: + continue + + # Deduplicate: don't re-alert on same issue within a session + dedup_key = f"{env_name}:{template['category']}:{template['title']}" + if dedup_key in self._emitted_alerts: + continue + self._emitted_alerts.add(dedup_key) + + self._alert_counter += 1 + alert = EnvironmentAlert( + alert_id=f"ENV-{self._alert_counter:04d}", + environment=env_name, + category=template["category"], + severity=template["severity"], + title=template["title"], + description=template["description"], + resource=template["resource"], + recommendation=template["recommendation"], + ) + self.alerts.append(alert) + self.alert_count += 1 + + ts = _ts() + sev_tag = alert.severity.upper() + print( + f"{ts} < env_manager: [{env_name.upper()}] [{sev_tag}] " + f"{alert.title} — {alert.resource}" + ) + + # Emit to event bus + await event_bus.emit("environment_alert", alert.to_dict()) + + # Critical/high findings also go through misconfig_found for response chain + if alert.severity in ("critical", "high"): + await event_bus.emit("misconfig_found", { + "environment": env_name, + "category": alert.category, + "severity": alert.severity, + "description": alert.description, + "resource": alert.resource, + "recommendation": alert.recommendation, + }) + + await asyncio.sleep(0.02) + + # ------------------------------------------------------------------ + # Query API + # ------------------------------------------------------------------ + + def get_alerts(self, environment: Optional[str] = None) -> List[Dict[str, Any]]: + """Return 
alerts, optionally filtered by environment.""" + alerts = self.alerts + if environment: + alerts = [a for a in alerts if a.environment == environment] + return [a.to_dict() for a in alerts] + + def get_stats(self) -> Dict[str, Any]: + """Return monitoring statistics.""" + by_env = {"cloud": 0, "onprem": 0, "hybrid": 0} + by_severity = {"critical": 0, "high": 0, "medium": 0, "low": 0} + by_category: Dict[str, int] = {} + + for alert in self.alerts: + by_env[alert.environment] = by_env.get(alert.environment, 0) + 1 + by_severity[alert.severity] = by_severity.get(alert.severity, 0) + 1 + by_category[alert.category] = by_category.get(alert.category, 0) + 1 + + return { + "total_alerts": self.alert_count, + "by_environment": by_env, + "by_severity": by_severity, + "by_category": by_category, + "monitoring_active": self._running, + } diff --git a/blue_agent/frontend/Dockerfile b/blue_agent/frontend/Dockerfile new file mode 100644 index 000000000..95ee2416d --- /dev/null +++ b/blue_agent/frontend/Dockerfile @@ -0,0 +1,34 @@ +# syntax=docker/dockerfile:1.6 +# Blue Agent Frontend — Vite + React, served via Nginx on port 3002 +# +# Multi-stage build: +# 1. `build` stage compiles the Vite app with Node 20 alpine. +# 2. `production` stage serves the static output via Nginx. +# +# In docker-compose the `build` stage is reused with `target: build` +# to run the Vite dev server with hot reload. + +# ---------- Stage 1: build ---------- +FROM node:20-alpine AS build + +WORKDIR /app + +COPY package*.json ./ +RUN npm install + +COPY . . + +RUN npm run build + +# ---------- Stage 2: production ---------- +FROM nginx:alpine AS production + +# Remove the default nginx site and drop in the Blue config (listens on 3002). 
+RUN rm -f /etc/nginx/conf.d/default.conf +COPY nginx.conf /etc/nginx/conf.d/default.conf + +COPY --from=build /app/dist /usr/share/nginx/html + +EXPOSE 3002 + +CMD ["nginx", "-g", "daemon off;"] diff --git a/blue_agent/frontend/index.html b/blue_agent/frontend/index.html new file mode 100644 index 000000000..e5a8ab3dc --- /dev/null +++ b/blue_agent/frontend/index.html @@ -0,0 +1,12 @@ + + + + + + HTF :: Blue Team + + +
+ + + diff --git a/blue_agent/frontend/nginx.conf b/blue_agent/frontend/nginx.conf new file mode 100644 index 000000000..7bb11c64f --- /dev/null +++ b/blue_agent/frontend/nginx.conf @@ -0,0 +1,29 @@ +server { + listen 3002; + server_name _; + + root /usr/share/nginx/html; + index index.html; + + # Client-side routing (React/Vite SPA) — fall back to index.html. + location / { + try_files $uri $uri/ /index.html; + } + + # Proxy REST traffic to the Blue backend. + location /api/ { + proxy_pass http://blue-backend:8002/; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + } + + # Proxy the live-log WebSocket to the Blue backend. + location /ws/ { + proxy_pass http://blue-backend:8002; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_set_header Host $host; + proxy_read_timeout 3600s; + } +} diff --git a/blue_agent/frontend/package-lock.json b/blue_agent/frontend/package-lock.json new file mode 100644 index 000000000..fc9ca2924 --- /dev/null +++ b/blue_agent/frontend/package-lock.json @@ -0,0 +1,2017 @@ +{ + "name": "htf-blue-frontend", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "htf-blue-frontend", + "version": "0.1.0", + "dependencies": { + "axios": "^1.7.7", + "react": "^18.3.1", + "react-dom": "^18.3.1" + }, + "devDependencies": { + "@types/react": "^18.3.11", + "@types/react-dom": "^18.3.0", + "@vitejs/plugin-react": "^4.3.2", + "typescript": "^5.6.2", + "vite": "^5.4.8" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", + "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.28.5", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": 
">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.29.0.tgz", + "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.29.0.tgz", + "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-compilation-targets": "^7.28.6", + "@babel/helper-module-transforms": "^7.28.6", + "@babel/helpers": "^7.28.6", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/traverse": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/remapping": "^2.3.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.29.1", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz", + "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.28.6", + "resolved": 
"https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz", + "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.28.6", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", + "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz", + "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.28.6", + "@babel/helper-validator-identifier": "^7.28.5", + "@babel/traverse": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.28.6", + "resolved": 
"https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz", + "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.29.2", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.29.2.tgz", + "integrity": "sha512-HoGuUs4sCZNezVEKdVcwqmZN8GoHirLUcLaYVNBK2J0DadGtdcqgr3BCbvH8+XUo4NGjNl3VOtSjEKNzqfFgKw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.29.2", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.2.tgz", + "integrity": 
"sha512-4GgRzy/+fsBa72/RZVJmGKPmZu9Byn8o4MoLpmNe1m8ZfYnz5emHLQz3U4gLud6Zwl0RZIcgiLD7Uq7ySFuDLA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.29.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-self": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", + "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-source": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", + "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz", + "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.28.6", + "@babel/parser": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz", + "integrity": 
"sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", + "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "cpu": [ + "arm64" + ], + "dev": true, 
+ "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": 
"sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.21.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + 
} + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, 
+ "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + 
"node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-beta.27", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz", + "integrity": "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.60.1.tgz", + "integrity": "sha512-d6FinEBLdIiK+1uACUttJKfgZREXrF0Qc2SmLII7W2AD8FfiZ9Wjd+rD/iRuf5s5dWrr1GgwXCvPqOuDquOowA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.60.1.tgz", + "integrity": "sha512-YjG/EwIDvvYI1YvYbHvDz/BYHtkY4ygUIXHnTdLhG+hKIQFBiosfWiACWortsKPKU/+dUwQQCKQM3qrDe8c9BA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.60.1.tgz", + "integrity": "sha512-mjCpF7GmkRtSJwon+Rq1N8+pI+8l7w5g9Z3vWj4T7abguC4Czwi3Yu/pFaLvA3TTeMVjnu3ctigusqWUfjZzvw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + 
"node_modules/@rollup/rollup-darwin-x64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.60.1.tgz", + "integrity": "sha512-haZ7hJ1JT4e9hqkoT9R/19XW2QKqjfJVv+i5AGg57S+nLk9lQnJ1F/eZloRO3o9Scy9CM3wQ9l+dkXtcBgN5Ew==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.60.1.tgz", + "integrity": "sha512-czw90wpQq3ZsAVBlinZjAYTKduOjTywlG7fEeWKUA7oCmpA8xdTkxZZlwNJKWqILlq0wehoZcJYfBvOyhPTQ6w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.60.1.tgz", + "integrity": "sha512-KVB2rqsxTHuBtfOeySEyzEOB7ltlB/ux38iu2rBQzkjbwRVlkhAGIEDiiYnO2kFOkJp+Z7pUXKyrRRFuFUKt+g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.60.1.tgz", + "integrity": "sha512-L+34Qqil+v5uC0zEubW7uByo78WOCIrBvci69E7sFASRl0X7b/MB6Cqd1lky/CtcSVTydWa2WZwFuWexjS5o6g==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.60.1.tgz", + "integrity": "sha512-n83O8rt4v34hgFzlkb1ycniJh7IR5RCIqt6mz1VRJD6pmhRi0CXdmfnLu9dIUS6buzh60IvACM842Ffb3xd6Gg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": 
true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.60.1.tgz", + "integrity": "sha512-Nql7sTeAzhTAja3QXeAI48+/+GjBJ+QmAH13snn0AJSNL50JsDqotyudHyMbO2RbJkskbMbFJfIJKWA6R1LCJQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.60.1.tgz", + "integrity": "sha512-+pUymDhd0ys9GcKZPPWlFiZ67sTWV5UU6zOJat02M1+PiuSGDziyRuI/pPue3hoUwm2uGfxdL+trT6Z9rxnlMA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.60.1.tgz", + "integrity": "sha512-VSvgvQeIcsEvY4bKDHEDWcpW4Yw7BtlKG1GUT4FzBUlEKQK0rWHYBqQt6Fm2taXS+1bXvJT6kICu5ZwqKCnvlQ==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.60.1.tgz", + "integrity": "sha512-4LqhUomJqwe641gsPp6xLfhqWMbQV04KtPp7/dIp0nzPxAkNY1AbwL5W0MQpcalLYk07vaW9Kp1PBhdpZYYcEw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.60.1.tgz", + "integrity": "sha512-tLQQ9aPvkBxOc/EUT6j3pyeMD6Hb8QF2BTBnCQWP/uu1lhc9AIrIjKnLYMEroIz/JvtGYgI9dF3AxHZNaEH0rw==", + 
"cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.60.1.tgz", + "integrity": "sha512-RMxFhJwc9fSXP6PqmAz4cbv3kAyvD1etJFjTx4ONqFP9DkTkXsAMU4v3Vyc5BgzC+anz7nS/9tp4obsKfqkDHg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.60.1.tgz", + "integrity": "sha512-QKgFl+Yc1eEk6MmOBfRHYF6lTxiiiV3/z/BRrbSiW2I7AFTXoBFvdMEyglohPj//2mZS4hDOqeB0H1ACh3sBbg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.60.1.tgz", + "integrity": "sha512-RAjXjP/8c6ZtzatZcA1RaQr6O1TRhzC+adn8YZDnChliZHviqIjmvFwHcxi4JKPSDAt6Uhf/7vqcBzQJy0PDJg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.60.1.tgz", + "integrity": "sha512-wcuocpaOlaL1COBYiA89O6yfjlp3RwKDeTIA0hM7OpmhR1Bjo9j31G1uQVpDlTvwxGn2nQs65fBFL5UFd76FcQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.60.1.tgz", + "integrity": 
"sha512-77PpsFQUCOiZR9+LQEFg9GClyfkNXj1MP6wRnzYs0EeWbPcHs02AXu4xuUbM1zhwn3wqaizle3AEYg5aeoohhg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.60.1.tgz", + "integrity": "sha512-5cIATbk5vynAjqqmyBjlciMJl1+R/CwX9oLk/EyiFXDWd95KpHdrOJT//rnUl4cUcskrd0jCCw3wpZnhIHdD9w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.60.1.tgz", + "integrity": "sha512-cl0w09WsCi17mcmWqqglez9Gk8isgeWvoUZ3WiJFYSR3zjBQc2J5/ihSjpl+VLjPqjQ/1hJRcqBfLjssREQILw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.60.1.tgz", + "integrity": "sha512-4Cv23ZrONRbNtbZa37mLSueXUCtN7MXccChtKpUnQNgF010rjrjfHx3QxkS2PI7LqGT5xXyYs1a7LbzAwT0iCA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.60.1.tgz", + "integrity": "sha512-i1okWYkA4FJICtr7KpYzFpRTHgy5jdDbZiWfvny21iIKky5YExiDXP+zbXzm3dUcFpkEeYNHgQ5fuG236JPq0g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.60.1", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.60.1.tgz", + "integrity": "sha512-u09m3CuwLzShA0EYKMNiFgcjjzwqtUMLmuCJLeZWjjOYA3IT2Di09KaxGBTP9xVztWyIWjVdsB2E9goMjZvTQg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.60.1.tgz", + "integrity": "sha512-k+600V9Zl1CM7eZxJgMyTUzmrmhB/0XZnF4pRypKAlAgxmedUA+1v9R+XOFv56W4SlHEzfeMtzujLJD22Uz5zg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.60.1.tgz", + "integrity": "sha512-lWMnixq/QzxyhTV6NjQJ4SFo1J6PvOX8vUx5Wb4bBPsEb+8xZ89Bz6kOXpfXj9ak9AHTQVQzlgzBEc1SyM27xQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + 
"node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.2" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/prop-types": { + "version": "15.7.15", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz", + "integrity": "sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/react": { + "version": "18.3.28", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.28.tgz", + "integrity": "sha512-z9VXpC7MWrhfWipitjNdgCauoMLRdIILQsAEV+ZesIzBq/oUlxk0m3ApZuMFCXdnS4U7KrI+l3WRUEGQ8K1QKw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@types/prop-types": "*", + "csstype": "^3.2.2" + } + }, + "node_modules/@types/react-dom": { + "version": "18.3.7", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.7.tgz", + "integrity": "sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==", + "dev": true, 
+ "license": "MIT", + "peerDependencies": { + "@types/react": "^18.0.0" + } + }, + "node_modules/@vitejs/plugin-react": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz", + "integrity": "sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.28.0", + "@babel/plugin-transform-react-jsx-self": "^7.27.1", + "@babel/plugin-transform-react-jsx-source": "^7.27.1", + "@rolldown/pluginutils": "1.0.0-beta.27", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.17.0" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "peerDependencies": { + "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "license": "MIT" + }, + "node_modules/axios": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.15.0.tgz", + "integrity": "sha512-wWyJDlAatxk30ZJer+GeCWS209sA42X+N5jU2jy6oHTp7ufw8uzUTVFBX9+wTfAlhiJXGS0Bq7X6efruWjuK9Q==", + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.11", + "form-data": "^4.0.5", + "proxy-from-env": "^2.1.0" + } + }, + "node_modules/baseline-browser-mapping": { + "version": "2.10.19", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.10.19.tgz", + "integrity": "sha512-qCkNLi2sfBOn8XhZQ0FXsT1Ki/Yo5P90hrkRamVFRS7/KV9hpfA4HkoWNU152+8w0zPjnxo5psx5NL3PSGgv5g==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.cjs" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/browserslist": { + "version": "4.28.2", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.2.tgz", 
+ "integrity": "sha512-48xSriZYYg+8qXna9kwqjIVzuQxi+KYWp2+5nCYnYKPTr0LvD89Jqk2Or5ogxz0NUMfIjhh2lIUX/LyX9B4oIg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "peer": true, + "dependencies": { + "baseline-browser-mapping": "^2.10.12", + "caniuse-lite": "^1.0.30001782", + "electron-to-chromium": "^1.5.328", + "node-releases": "^2.0.36", + "update-browserslist-db": "^1.2.3" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001788", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001788.tgz", + "integrity": "sha512-6q8HFp+lOQtcf7wBK+uEenxymVWkGKkjFpCvw5W25cmMwEDU45p1xQFBQv8JDlMMry7eNxyBaR+qxgmTUZkIRQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": 
"sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/csstype": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + 
"node_modules/electron-to-chromium": { + "version": "1.5.338", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.338.tgz", + "integrity": "sha512-KVQQ3xko9/coDX3qXLUEEbqkKT8L+1DyAovrtu0Khtrt9wjSZ+7CZV4GVzxFy9Oe1NbrIU1oVXCwHJruIA1PNg==", + "dev": true, + "license": "ISC" + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/esbuild": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", + "integrity": 
"sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/follow-redirects": { + "version": "1.16.0", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.16.0.tgz", + "integrity": "sha512-y5rN/uOsadFT/JfYwhxRS5R7Qce+g3zG97+JrtFZlC9klX/W5hD7iiLzScI4nZqUS7DNUdhPgw4xI8W2LuXlUw==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "4.0.5", + "resolved": 
"https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz", + "integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==", + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + 
"has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": 
"MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "license": "MIT" + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": 
"sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/node-releases": { + "version": "2.0.37", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.37.tgz", + "integrity": "sha512-1h5gKZCF+pO/o3Iqt5Jp7wc9rH3eJJ0+nh/CIoiRwjRxde/hAHyLPXYN4V3CqKAbiZPSeJFSWHmJsbkicta0Eg==", + "dev": true, + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + 
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/postcss": { + "version": "8.5.10", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.10.tgz", + "integrity": "sha512-pMMHxBOZKFU6HgAZ4eyGnwXF/EvPGGqUr0MnZ5+99485wwW41kW91A4LOGxSHhgugZmSChL5AlElNdwlNgcnLQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/proxy-from-env": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-2.1.0.tgz", + "integrity": "sha512-cJ+oHTW1VAEa8cJslgmUZrc+sjRKgAKl3Zyse6+PV38hZe/V6Z14TbCuXcan9F9ghlz4QrFr2c92TNF82UkYHA==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/react": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", + "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", + "license": "MIT", + "peer": true, + "dependencies": { + "loose-envify": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.2" + }, + "peerDependencies": { + "react": "^18.3.1" + } + }, + "node_modules/react-refresh": { + "version": "0.17.0", + "resolved": 
"https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", + "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/rollup": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.60.1.tgz", + "integrity": "sha512-VmtB2rFU/GroZ4oL8+ZqXgSA38O6GR8KSIvWmEFv63pQ0G6KaBH9s07PO8XTXP4vI+3UJUEypOfjkGfmSBBR0w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.60.1", + "@rollup/rollup-android-arm64": "4.60.1", + "@rollup/rollup-darwin-arm64": "4.60.1", + "@rollup/rollup-darwin-x64": "4.60.1", + "@rollup/rollup-freebsd-arm64": "4.60.1", + "@rollup/rollup-freebsd-x64": "4.60.1", + "@rollup/rollup-linux-arm-gnueabihf": "4.60.1", + "@rollup/rollup-linux-arm-musleabihf": "4.60.1", + "@rollup/rollup-linux-arm64-gnu": "4.60.1", + "@rollup/rollup-linux-arm64-musl": "4.60.1", + "@rollup/rollup-linux-loong64-gnu": "4.60.1", + "@rollup/rollup-linux-loong64-musl": "4.60.1", + "@rollup/rollup-linux-ppc64-gnu": "4.60.1", + "@rollup/rollup-linux-ppc64-musl": "4.60.1", + "@rollup/rollup-linux-riscv64-gnu": "4.60.1", + "@rollup/rollup-linux-riscv64-musl": "4.60.1", + "@rollup/rollup-linux-s390x-gnu": "4.60.1", + "@rollup/rollup-linux-x64-gnu": "4.60.1", + "@rollup/rollup-linux-x64-musl": "4.60.1", + "@rollup/rollup-openbsd-x64": "4.60.1", + "@rollup/rollup-openharmony-arm64": "4.60.1", + "@rollup/rollup-win32-arm64-msvc": "4.60.1", + "@rollup/rollup-win32-ia32-msvc": "4.60.1", + "@rollup/rollup-win32-x64-gnu": "4.60.1", + "@rollup/rollup-win32-x64-msvc": "4.60.1", + "fsevents": "~2.3.2" + } + }, + "node_modules/scheduler": { + "version": "0.23.2", + "resolved": 
"https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", + "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + } + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + 
"update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/vite": { + "version": "5.4.21", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz", + "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + } + } +} diff --git a/blue_agent/frontend/package.json b/blue_agent/frontend/package.json new file mode 100644 index 000000000..4fd873441 --- /dev/null +++ b/blue_agent/frontend/package.json @@ -0,0 +1,24 @@ +{ + "name": "htf-blue-frontend", + "private": true, + "version": "0.1.0", + "type": "module", + "scripts": { + "dev": "vite --port 5174", + "build": "tsc -b && vite build", + "preview": "vite preview --port 5174", + "lint": "tsc --noEmit" + }, + 
"dependencies": { + "axios": "^1.7.7", + "react": "^18.3.1", + "react-dom": "^18.3.1" + }, + "devDependencies": { + "@types/react": "^18.3.11", + "@types/react-dom": "^18.3.0", + "@vitejs/plugin-react": "^4.3.2", + "typescript": "^5.6.2", + "vite": "^5.4.8" + } +} diff --git a/blue_agent/frontend/public/.gitkeep b/blue_agent/frontend/public/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/blue_agent/frontend/src/App.tsx b/blue_agent/frontend/src/App.tsx new file mode 100644 index 000000000..0aa7bf49a --- /dev/null +++ b/blue_agent/frontend/src/App.tsx @@ -0,0 +1,5 @@ +import { BlueDashboard } from "@/pages/BlueDashboard"; + +export default function App() { + return ; +} diff --git a/blue_agent/frontend/src/api/blueApi.ts b/blue_agent/frontend/src/api/blueApi.ts new file mode 100644 index 000000000..14a9d9362 --- /dev/null +++ b/blue_agent/frontend/src/api/blueApi.ts @@ -0,0 +1,98 @@ +import axios from "axios"; +import type { + AssetInfo, + ClosePortRequest, + EnvironmentAlert, + EnvironmentStats, + EvolutionMetrics, + HardenServiceRequest, + RedReportRequest, + RemediationResult, + RemediationStatus, + ScanStats, + SSHCredentials, + SSHScanResult, + ToolCall, +} from "@/types/blue.types"; + +const BLUE_BASE_URL = + import.meta.env.VITE_BLUE_API_URL ?? 
"http://localhost:8002"; + +const client = axios.create({ + baseURL: BLUE_BASE_URL, + timeout: 120_000, // 2 min — SSH scans can take time on real servers +}); + +export const blueApi = { + health: () => client.get<{ status: string; agent: string }>("/health"), + + // Defense + closePort: (req: ClosePortRequest) => + client.post("/defend/close_port", req).then((r) => r.data), + hardenService: (req: HardenServiceRequest) => + client.post("/defend/harden_service", req).then((r) => r.data), + isolateHost: (host: string, reason?: string) => + client.post("/defend/isolate_host", { host, reason }).then((r) => r.data), + recentDefenses: (limit = 20) => + client + .get("/defend/recent", { params: { limit } }) + .then((r) => r.data), + + // Patching + applyPatch: (host: string, cve_id?: string, pkg?: string) => + client + .post("/patch/apply", { host, cve_id, package: pkg }) + .then((r) => r.data), + verifyFix: (host: string, cve_id: string) => + client.post("/patch/verify_fix", { host, cve_id }).then((r) => r.data), + + // Strategy + planDefense: (host: string, threat: Record = {}) => + client.post("/strategy/plan", { host, threat }).then((r) => r.data), + currentStrategy: () => + client.get("/strategy/current").then((r) => r.data), + evolutionMetrics: () => + client.get("/strategy/evolution").then((r) => r.data), + agentStatus: () => + client.get("/strategy/status").then((r) => r.data), + + // Scanning + assetInventory: (environment?: string) => + client + .get("/scan/inventory", { params: environment ? { environment } : {} }) + .then((r) => r.data), + vulnerableAssets: () => + client.get("/scan/vulnerable").then((r) => r.data), + scanStats: () => + client.get("/scan/stats").then((r) => r.data), + allVulnerabilities: () => + client.get("/scan/vulnerabilities").then((r) => r.data), + + // Environment monitoring + environmentAlerts: (environment?: string) => + client + .get("/environment/alerts", { + params: environment ? 
{ environment } : {}, + }) + .then((r) => r.data), + environmentStats: () => + client.get("/environment/stats").then((r) => r.data), + + // SSH scanning (step 1: scan, step 2: apply fixes) + sshScan: (creds: SSHCredentials) => + client.post("/scan/ssh", creds).then((r) => r.data), + sshApplyFixes: () => + client.post("/scan/ssh/apply-fixes").then((r) => r.data), + sshScanResults: () => + client.get("/scan/ssh/results").then((r) => r.data), + sshScanStats: () => + client.get("/scan/ssh/stats").then((r) => r.data), + + // Remediation — Red report ingestion + ingestReport: (report: RedReportRequest) => + client.post("/remediate/ingest-report", report).then((r) => r.data), + runSampleRemediation: () => + client.post("/remediate/run-sample").then((r) => r.data), + remediationStatus: () => + client.get("/remediate/status").then((r) => r.data), +}; diff --git a/blue_agent/frontend/src/components/ActivityPanel.tsx b/blue_agent/frontend/src/components/ActivityPanel.tsx new file mode 100644 index 000000000..a4bae8e64 --- /dev/null +++ b/blue_agent/frontend/src/components/ActivityPanel.tsx @@ -0,0 +1,56 @@ +import type { ToolCall } from "@/types/blue.types"; +import { ToolCard } from "./ToolCard"; + +interface ActivityPanelProps { + toolCalls: ToolCall[]; + limit?: number; + accent?: string; +} + +export function ActivityPanel({ + toolCalls, + limit = 10, + accent = "#58a6ff", +}: ActivityPanelProps) { + const recent = [...toolCalls].slice(-limit).reverse(); + + return ( +
+
+

+ CURRENT ACTIVITY +

+ + {recent.length} tool calls + +
+
+ {recent.length === 0 ? ( +

No activity yet.

+ ) : ( + recent.map((call) => ) + )} +
+
+ ); +} diff --git a/blue_agent/frontend/src/components/ChatButton.tsx b/blue_agent/frontend/src/components/ChatButton.tsx new file mode 100644 index 000000000..de97efa8b --- /dev/null +++ b/blue_agent/frontend/src/components/ChatButton.tsx @@ -0,0 +1,29 @@ +interface ChatButtonProps { + accent?: string; + onClick?: () => void; +} + +export function ChatButton({ accent = "#58a6ff", onClick }: ChatButtonProps) { + return ( + + ); +} diff --git a/blue_agent/frontend/src/components/EnvironmentPanel.tsx b/blue_agent/frontend/src/components/EnvironmentPanel.tsx new file mode 100644 index 000000000..8844ffd5c --- /dev/null +++ b/blue_agent/frontend/src/components/EnvironmentPanel.tsx @@ -0,0 +1,146 @@ +import type { EnvironmentStats } from "@/types/blue.types"; + +interface EnvironmentPanelProps { + stats: EnvironmentStats | null; + accent?: string; +} + +const ENV_COLORS: Record = { + cloud: "#58a6ff", + onprem: "#7ee787", + hybrid: "#d29922", +}; + +const SEV_COLORS: Record = { + critical: "#f85149", + high: "#f0883e", + medium: "#d29922", + low: "#8b949e", +}; + +export function EnvironmentPanel({ stats, accent = "#58a6ff" }: EnvironmentPanelProps) { + if (!stats) { + return ( +
+
+

Monitoring initializing...

+
+ ); + } + + return ( +
+
+ +
+ {(["cloud", "onprem", "hybrid"] as const).map((env) => ( +
+
+ {env === "onprem" ? "ON-PREM" : env.toUpperCase()} +
+
+ {stats.by_environment[env] ?? 0} +
+
alerts
+
+ ))} +
+ +
ALERT SEVERITY
+
+ {Object.entries(stats.by_severity) + .filter(([, count]) => count > 0) + .map(([sev, count]) => ( + + {sev}: {count} + + ))} +
+ + {Object.keys(stats.by_category).length > 0 && ( + <> +
BY CATEGORY
+
+ {Object.entries(stats.by_category).map(([cat, count]) => ( + + {cat}: {count} + + ))} +
+ + )} + +
+ {stats.monitoring_active ? "MONITORING ACTIVE" : "MONITORING INACTIVE"} + {" "}· {stats.total_alerts} total alerts +
+
+ ); +} + +function Header({ accent }: { accent: string }) { + return ( +
+

+ ENVIRONMENT MONITOR +

+ cloud + onprem + hybrid +
+ ); +} + +function sectionStyle(accent: string): React.CSSProperties { + return { + background: "#0d1117", + borderRadius: 8, + padding: 12, + border: `1px solid ${accent}55`, + height: "100%", + overflowY: "auto", + }; +} diff --git a/blue_agent/frontend/src/components/EvolutionPanel.tsx b/blue_agent/frontend/src/components/EvolutionPanel.tsx new file mode 100644 index 000000000..e86513127 --- /dev/null +++ b/blue_agent/frontend/src/components/EvolutionPanel.tsx @@ -0,0 +1,127 @@ +import type { EvolutionMetrics } from "@/types/blue.types"; + +interface EvolutionPanelProps { + metrics: EvolutionMetrics | null; + accent?: string; +} + +export function EvolutionPanel({ metrics, accent = "#58a6ff" }: EvolutionPanelProps) { + if (!metrics) { + return ( +
+
+

Evolver initializing...

+
+ ); + } + + const improvementColor = + metrics.improvement_pct > 10 ? "#3fb950" : metrics.improvement_pct > 0 ? "#d29922" : "#8b949e"; + + return ( +
+
+ +
+ + + + + +
+ + {metrics.current_params && Object.keys(metrics.current_params).length > 0 && ( + <> +
TUNED PARAMETERS
+
+ {Object.entries(metrics.current_params).map(([key, val]) => ( +
+ {key}:{" "} + {typeof val === "number" ? val.toFixed(2) : String(val)} +
+ ))} +
+ + )} + + {metrics.top_attack_patterns.length > 0 && ( + <> +
+ TOP ATTACK PATTERNS ({metrics.total_patterns_tracked} tracked) +
+
+ {metrics.top_attack_patterns.slice(0, 5).map((p) => ( +
+ {p.pattern} + {p.count} +
+ ))} +
+ + )} +
+ ); +} + +function Header({ accent }: { accent: string }) { + return ( +
+

+ DEFENSIVE EVOLUTION +

+ learning +
+ ); +} + +function MetricBox({ label, value, color }: { label: string; value: number | string; color: string }) { + return ( +
+
{value}
+
{label}
+
+ ); +} + +function sectionStyle(accent: string): React.CSSProperties { + return { + background: "#0d1117", + borderRadius: 8, + padding: 12, + border: `1px solid ${accent}55`, + height: "100%", + overflowY: "auto", + }; +} diff --git a/blue_agent/frontend/src/components/FixPlanPanel.tsx b/blue_agent/frontend/src/components/FixPlanPanel.tsx new file mode 100644 index 000000000..e94059d93 --- /dev/null +++ b/blue_agent/frontend/src/components/FixPlanPanel.tsx @@ -0,0 +1,167 @@ +import type { SSHScanResult } from "@/types/blue.types"; + +interface FixPlanPanelProps { + result: SSHScanResult; + applying: boolean; + onApply: () => void; + accent?: string; +} + +const SEV_COLORS: Record = { + critical: "#f85149", + high: "#f0883e", + medium: "#d29922", + low: "#8b949e", +}; + +export function FixPlanPanel({ result, applying, onApply, accent = "#58a6ff" }: FixPlanPanelProps) { + const vulnerable = result.services.filter((s) => s.cve_count > 0); + const allFixed = vulnerable.length > 0 && vulnerable.every((s) => s.fixed); + const totalCmds = vulnerable.reduce((sum, s) => sum + (s.proposed_fixes?.length ?? 0), 0); + + return ( +
+ {/* Header */} +
+

+ {allFixed ? "FIXES APPLIED" : "FIX PLAN"} +

+ {!allFixed && vulnerable.length > 0 && ( + + )} + {allFixed && ( + ALL PATCHED + )} +
+ + {/* Fix list */} +
+ {vulnerable.length === 0 && ( +
+ No vulnerabilities found — server is clean. +
+ )} + + {vulnerable.map((svc, i) => ( +
+ {/* Service header */} +
+ + {svc.software} {svc.version} + {svc.port && :{svc.port}} + + {svc.fixed ? ( + + PATCHED + + ) : ( + + {svc.cve_count} CVE{svc.cve_count > 1 ? "s" : ""} + + )} +
+ + {/* CVEs */} + {svc.cves.map((cve) => ( +
+ {cve.cve_id} + CVSS {cve.cvss_score} ({cve.severity}) + — {cve.description.slice(0, 80)} +
+ ))} + + {/* Proposed fix commands */} + {!svc.fixed && svc.proposed_fixes && svc.proposed_fixes.length > 0 && ( +
+
+ COMMANDS TO EXECUTE: +
+ {svc.proposed_fixes.map((line, j) => { + const isCmd = line.trimStart().startsWith("$"); + const isHeader = !isCmd && !line.startsWith(" "); + return ( +
+ {line} +
+ ); + })} +
+ )} + + {/* Fix result */} + {svc.fixed && ( +
+ Fix applied — upgrade + hardening executed on server +
+ )} +
+ ))} +
+
+ ); +} diff --git a/blue_agent/frontend/src/components/LogStream.tsx b/blue_agent/frontend/src/components/LogStream.tsx new file mode 100644 index 000000000..ee5fb7bb1 --- /dev/null +++ b/blue_agent/frontend/src/components/LogStream.tsx @@ -0,0 +1,81 @@ +import { useEffect, useRef } from "react"; +import type { LogEntry } from "@/types/blue.types"; + +interface LogStreamProps { + logs: LogEntry[]; + accent?: string; +} + +function formatTime(ts: string): string { + const d = new Date(ts); + const hh = String(d.getHours()).padStart(2, "0"); + const mm = String(d.getMinutes()).padStart(2, "0"); + const ss = String(d.getSeconds()).padStart(2, "0"); + return `${hh}:${mm}:${ss}`; +} + +const LEVEL_COLORS: Record = { + INFO: "#7ee787", + WARN: "#d29922", + ERROR: "#f85149", +}; + +export function LogStream({ logs, accent = "#58a6ff" }: LogStreamProps) { + const bottomRef = useRef(null); + + useEffect(() => { + bottomRef.current?.scrollIntoView({ behavior: "smooth" }); + }, [logs]); + + return ( +
+
+

+ LIVE LOGS +

+ {logs.length} lines +
+
+ {logs.map((line, i) => ( +
+ [{formatTime(line.timestamp)}] + + {line.level} + + {line.message} +
+ ))} +
+
+
+ ); +} diff --git a/blue_agent/frontend/src/components/RemediationPanel.tsx b/blue_agent/frontend/src/components/RemediationPanel.tsx new file mode 100644 index 000000000..1f5cc02c0 --- /dev/null +++ b/blue_agent/frontend/src/components/RemediationPanel.tsx @@ -0,0 +1,182 @@ +import type { RemediationResult } from "@/types/blue.types"; + +interface Props { + result: RemediationResult; + accent: string; +} + +const SEV_COLORS: Record = { + critical: "#f85149", + high: "#f0883e", + medium: "#d29922", + low: "#7ee787", + info: "#8b949e", +}; + +export function RemediationPanel({ result, accent }: Props) { + return ( +
+ {/* Header */} +
+

+ REMEDIATION RESULTS +

+ + {result.status.toUpperCase()} + +
+ + {/* Summary stats */} +
+ + + +
+ + {/* Risk score */} +
= 9 ? "#f85149" : result.risk_score >= 7 ? "#f0883e" : "#d29922"}55`, + borderRadius: 6, + padding: "8px 12px", + display: "flex", + justifyContent: "space-between", + alignItems: "center", + }} + > + RISK SCORE + = 9 ? "#f85149" : result.risk_score >= 7 ? "#f0883e" : "#d29922", + fontSize: 18, + fontWeight: 700, + }} + > + {result.risk_score}/10 + +
+ + {/* Severity breakdown */} + {Object.keys(result.severity_counts).length > 0 && ( +
+ {Object.entries(result.severity_counts) + .filter(([, count]) => count > 0) + .map(([sev, count]) => ( + + {sev.toUpperCase()}: {count} + + ))} +
+ )} + + {/* Applied fixes list */} +
+ APPLIED FIXES + {result.applied_fixes.map((fix) => ( +
+
+ + {fix.fix_id.replace(/_/g, " ")} + + + {fix.status} + +
+
+ {fix.details} +
+
+ + {fix.severity.toUpperCase()} + + + {fix.steps_applied} steps + + {fix.endpoint && ( + + {fix.endpoint as string} + + )} +
+
+ ))} +
+
+ ); +} + +function StatBox({ label, value, color }: { label: string; value: number; color: string }) { + return ( +
+
{value}
+
{label}
+
+ ); +} diff --git a/blue_agent/frontend/src/components/ReportInputPanel.tsx b/blue_agent/frontend/src/components/ReportInputPanel.tsx new file mode 100644 index 000000000..05e8c1ff2 --- /dev/null +++ b/blue_agent/frontend/src/components/ReportInputPanel.tsx @@ -0,0 +1,148 @@ +import { useState } from "react"; + +interface Props { + accent: string; + onSubmit: (raw: string) => void; + onRunSample: () => void; + submitting: boolean; +} + +const PLACEHOLDER = `Paste the Red Team report JSON here, for example: + +{ + "target": "http://172.25.8.172:5000", + "risk_score": 10.0, + "recon": { + "open_ports": [{"port": 5000, "service": "Flask"}], + "tech_stack": {"language": "Python", "framework": "Flask", "database": "SQLite"}, + "vulnerabilities": [{ + "type": "sql_injection", + "severity": "critical", + "endpoint": "/login", + "description": "SQL Injection on /login" + }] + }, + "exploit": { + "database": {"type": "SQLite", "tables": ["users","products","secrets"]}, + "exfiltrated_data": [{ + "table": "users", + "rows": [{"username":"admin","password":"sup3rs3cr3t","role":"admin"}], + "has_plaintext_passwords": true + }], + "credentials_stolen": [{"username":"admin","role":"admin"}] + }, + "recommendations": [ + {"severity":"critical","category":"sql_injection_fix","action":"parameterized_queries","description":"Use parameterized queries"}, + {"severity":"critical","category":"password_hashing","action":"hash_passwords","description":"Hash passwords with bcrypt"} + ] +}`; + +export function ReportInputPanel({ accent, onSubmit, onRunSample, submitting }: Props) { + const [raw, setRaw] = useState(""); + const [parseError, setParseError] = useState(null); + + const handleSubmit = () => { + setParseError(null); + const text = raw.trim(); + if (!text) { + setParseError("Paste a report first"); + return; + } + try { + JSON.parse(text); + onSubmit(text); + } catch { + setParseError("Invalid JSON — check the format and try again"); + } + }; + + return ( +
+
+

+ RED TEAM REPORT INPUT +

+ paste JSON or use sample +
+ +