diff --git a/pyproject.toml b/pyproject.toml index 1364e6a..6b5344b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,6 +72,7 @@ include = [ "src/notifications", "src/shortcuts", "src/telegram_bot", + "src/spark", ] [tool.pytest.ini_options] diff --git a/src/config.py b/src/config.py index 506e643..4197bd1 100644 --- a/src/config.py +++ b/src/config.py @@ -28,6 +28,12 @@ class Settings(BaseSettings): # 8b ~16 GB | 70b ~140 GB | 405b ~810 GB airllm_model_size: Literal["8b", "70b", "405b"] = "70b" + # ── Spark Intelligence ──────────────────────────────────────────────── + # Enable/disable the Spark cognitive layer. + # When enabled, Spark captures swarm events, runs EIDOS predictions, + # consolidates memories, and generates advisory recommendations. + spark_enabled: bool = True + model_config = SettingsConfigDict( env_file=".env", env_file_encoding="utf-8", diff --git a/src/dashboard/app.py b/src/dashboard/app.py index 78e7be2..729c7cd 100644 --- a/src/dashboard/app.py +++ b/src/dashboard/app.py @@ -23,6 +23,7 @@ from dashboard.routes.briefing import router as briefing_router from dashboard.routes.telegram import router as telegram_router from dashboard.routes.swarm_internal import router as swarm_internal_router from dashboard.routes.tools import router as tools_router +from dashboard.routes.spark import router as spark_router logging.basicConfig( level=logging.INFO, @@ -97,6 +98,11 @@ async def lifespan(app: FastAPI): except Exception as exc: logger.error("Failed to spawn persona agents: %s", exc) + # Initialise Spark Intelligence engine + from spark.engine import spark_engine + if spark_engine.enabled: + logger.info("Spark Intelligence active — event capture enabled") + # Auto-start Telegram bot if a token is configured from telegram_bot.bot import telegram_bot await telegram_bot.start() @@ -136,6 +142,7 @@ app.include_router(briefing_router) app.include_router(telegram_router) app.include_router(swarm_internal_router) app.include_router(tools_router) 
+app.include_router(spark_router) @app.get("/", response_class=HTMLResponse) diff --git a/src/dashboard/routes/spark.py b/src/dashboard/routes/spark.py new file mode 100644 index 0000000..f998050 --- /dev/null +++ b/src/dashboard/routes/spark.py @@ -0,0 +1,147 @@ +"""Spark Intelligence dashboard routes. + +GET /spark — JSON status (API) +GET /spark/ui — HTML Spark Intelligence dashboard +GET /spark/timeline — HTMX partial: recent event timeline +GET /spark/insights — HTMX partial: advisories and insights +GET /spark/ui — also renders EIDOS predictions (no standalone partial yet) +""" + +import json +import logging +from pathlib import Path + +from fastapi import APIRouter, Request +from fastapi.responses import HTMLResponse +from fastapi.templating import Jinja2Templates + +from spark.engine import spark_engine + +logger = logging.getLogger(__name__) + +router = APIRouter(prefix="/spark", tags=["spark"]) +templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates")) + + +@router.get("/ui", response_class=HTMLResponse) +async def spark_ui(request: Request): + """Render the Spark Intelligence dashboard page.""" + status = spark_engine.status() + advisories = spark_engine.get_advisories() + timeline = spark_engine.get_timeline(limit=20) + predictions = spark_engine.get_predictions(limit=10) + memories = spark_engine.get_memories(limit=10) + + # Parse event data JSON for template display + timeline_enriched = [] + for ev in timeline: + entry = { + "id": ev.id, + "event_type": ev.event_type, + "agent_id": ev.agent_id, + "task_id": ev.task_id, + "description": ev.description, + "importance": ev.importance, + "created_at": ev.created_at, + } + try: + entry["data"] = json.loads(ev.data) + except (json.JSONDecodeError, TypeError): + entry["data"] = {} + timeline_enriched.append(entry) + + # Enrich predictions for display + predictions_enriched = [] + for p in predictions: + entry = { + "id": p.id, + "task_id": p.task_id, + "prediction_type": p.prediction_type, + 
"accuracy": p.accuracy, + "created_at": p.created_at, + "evaluated_at": p.evaluated_at, + } + try: + entry["predicted"] = json.loads(p.predicted_value) + except (json.JSONDecodeError, TypeError): + entry["predicted"] = {} + try: + entry["actual"] = json.loads(p.actual_value) if p.actual_value else None + except (json.JSONDecodeError, TypeError): + entry["actual"] = None + predictions_enriched.append(entry) + + return templates.TemplateResponse( + request, + "spark.html", + { + "status": status, + "advisories": advisories, + "timeline": timeline_enriched, + "predictions": predictions_enriched, + "memories": memories, + }, + ) + + +@router.get("") +async def spark_status_json(): + """Return Spark Intelligence status as JSON.""" + from fastapi.responses import JSONResponse + status = spark_engine.status() + advisories = spark_engine.get_advisories() + return JSONResponse({ + "status": status, + "advisories": [ + { + "category": a.category, + "priority": a.priority, + "title": a.title, + "detail": a.detail, + "suggested_action": a.suggested_action, + "subject": a.subject, + "evidence_count": a.evidence_count, + } + for a in advisories + ], + }) + + +@router.get("/timeline", response_class=HTMLResponse) +async def spark_timeline(request: Request): + """HTMX partial: recent event timeline.""" + timeline = spark_engine.get_timeline(limit=20) + timeline_enriched = [] + for ev in timeline: + entry = { + "id": ev.id, + "event_type": ev.event_type, + "agent_id": ev.agent_id, + "task_id": ev.task_id, + "description": ev.description, + "importance": ev.importance, + "created_at": ev.created_at, + } + try: + entry["data"] = json.loads(ev.data) + except (json.JSONDecodeError, TypeError): + entry["data"] = {} + timeline_enriched.append(entry) + + return templates.TemplateResponse( + request, + "partials/spark_timeline.html", + {"timeline": timeline_enriched}, + ) + + +@router.get("/insights", response_class=HTMLResponse) +async def 
spark_insights(request: Request): + """HTMX partial: advisories and consolidated memories.""" + advisories = spark_engine.get_advisories() + memories = spark_engine.get_memories(limit=10) + return templates.TemplateResponse( + request, + "partials/spark_insights.html", + {"advisories": advisories, "memories": memories}, + ) diff --git a/src/dashboard/templates/base.html b/src/dashboard/templates/base.html index 4d92db3..1fc5c2a 100644 --- a/src/dashboard/templates/base.html +++ b/src/dashboard/templates/base.html @@ -23,6 +23,7 @@