Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
95 changes: 61 additions & 34 deletions backend/routers/reports.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,21 +8,33 @@

import csv
import io
import json
from datetime import UTC, date, datetime
from typing import Any, Dict, List
from typing import List, Optional

from fastapi import APIRouter, Depends
from fastapi.responses import PlainTextResponse
from sqlalchemy import func, select
from sqlalchemy.ext.asyncio import AsyncSession

from backend.db.database import (AssessmentRecord, ControlRecord,
EvidenceRecord, get_db,
get_latest_assessments)
from backend.db.database import (
AssessmentRecord,
ControlRecord,
get_db,
get_latest_assessments,
)

router = APIRouter()

# Maps each Zero Trust pillar to the NIST SP 800-171 / CMMC control-family
# prefixes that contribute to its maturity score. A family may back more
# than one pillar (e.g. "AC" feeds both User and Network), so pillar
# percentages are intentionally not mutually exclusive.
ZT_PILLAR_DOMAINS = {
    "User": ["AC", "IA", "PS"],
    "Device": ["CM", "MA", "PE"],
    "Network": ["SC", "AC"],
    "Application": ["CM", "CA", "SI"],
    "Data": ["MP", "SC", "AU"],
    "Visibility & Analytics": ["AU", "IR", "RA"],
    "Automation & Orchestration": ["IR", "SI", "CA"],
}


def get_status_emoji(status: str) -> str:
"""Map implementation status to a visual emoji for better scannability."""
Expand All @@ -31,7 +43,7 @@ def get_status_emoji(status: str) -> str:
"partial": "🟑",
"partially_implemented": "🟑",
"planned": "πŸ“",
"not_implemented": "πŸ›‘",
"not_implemented": "🚫",
"na": "βšͺ",
"not_started": "βšͺ",
}
Expand All @@ -52,6 +64,18 @@ def get_confidence_stars(confidence: float) -> str:
return "⭐" * stars + "β˜†" * (5 - stars)


def get_maturity_pct(assessments: List[AssessmentRecord]) -> float:
    """Standardized maturity calculation (0.5 weight for partial).

    A fully implemented control contributes 1.0, a partially implemented
    control contributes 0.5, and any other status contributes nothing.

    Args:
        assessments: Assessment records to score; only ``status`` is read.

    Returns:
        Weighted maturity as a percentage (0.0-100.0). Returns 0.0 for an
        empty input to avoid division by zero.
    """
    if not assessments:
        return 0.0
    # Per-status contribution weights; unknown statuses default to 0.0.
    weights = {
        "implemented": 1.0,
        "partial": 0.5,
        "partially_implemented": 0.5,
    }
    score = sum(weights.get(record.status, 0.0) for record in assessments)
    return (score / len(assessments)) * 100


@router.get("/ssp", summary="Generate System Security Plan (SSP) in Markdown")
async def generate_ssp(
system_name: str = "AGI Corp CMMC System",
Expand Down Expand Up @@ -94,21 +118,24 @@ async def generate_ssp(
)
sprs_estimate = max(-203, round(sprs_estimate, 0))

total_controls_count = len(controls)
compliance_pct = (
(status_counts["implemented"] / total_controls_count * 100)
if total_controls_count > 0
else 0
)
progress_bar = get_progress_bar(compliance_pct)
# Calculate ZT Pillar Maturity
zt_maturity = {}
for pillar, domains in ZT_PILLAR_DOMAINS.items():
pillar_assessments = [
a
for a in assessments
if a.control_id.split(".")[0] in domains
or (controls.get(a.control_id) and controls[a.control_id].domain in domains)
Comment on lines +124 to +128
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

P1 Badge Base pillar maturity on full control set, not assessed subset

In generate_ssp, pillar maturity is computed from assessments only, so controls in the pillar that have no current assessment are dropped from the denominator. That means a pillar can show very high (or 100%) maturity when only a small assessed subset is implemented, which materially overstates compliance posture in the SSP. This affects any environment where assessments are partial/incomplete; use the mapped controls for each pillar as the denominator and treat missing assessments explicitly.

Useful? React with πŸ‘Β / πŸ‘Ž.

]
zt_maturity[pillar] = get_maturity_pct(pillar_assessments)

ssp = f"""# System Security Plan (SSP)
## {system_name}

**Classification:** {classification}
**Classification:** {classification}
**Generated:** {datetime.now(UTC).strftime('%Y-%m-%d %H:%M UTC')}
**Framework:** CMMC 2.0 Level 2 / NIST SP 800-171 Rev 2
**SPRS Score Estimate:** {sprs_estimate}
**Framework:** CMMC 2.0 Level 2 / NIST SP 800-171 Rev 2
**SPRS Score Estimate:** {sprs_estimate}
**Overall Compliance:** {get_progress_bar(implemented_pct)}

---
Expand Down Expand Up @@ -136,23 +163,28 @@ async def generate_ssp(
| Not Implemented | {get_status_emoji('not_implemented')} {status_counts['not_implemented']} |
| N/A | {get_status_emoji('na')} {status_counts['na']} |

[↑ Back to Top](#system-security-plan-ssp)

## 2. Control Implementation Summary

### Zero Trust Pillar Alignment

| ZT Pillar | CMMC Domains | Status |
|-----------|--------------|--------|
| User | AC, IA, PS | See assessment |
| Device | CM, MA, PE | See assessment |
| Network | SC, AC | See assessment |
| Application | CM, CA, SI | See assessment |
| Data | MP, SC, AU | See assessment |
| Visibility & Analytics | AU, IR, RA | See assessment |
| Automation & Orchestration | IR, SI, CA | See assessment |
| ZT Pillar | CMMC Domains | Maturity |
|-----------|--------------|----------|
| User | AC, IA, PS | {get_progress_bar(zt_maturity["User"])} |
| Device | CM, MA, PE | {get_progress_bar(zt_maturity["Device"])} |
| Network | SC, AC | {get_progress_bar(zt_maturity["Network"])} |
| Application | CM, CA, SI | {get_progress_bar(zt_maturity["Application"])} |
| Data | MP, SC, AU | {get_progress_bar(zt_maturity["Data"])} |
| Visibility & Analytics | AU, IR, RA | {get_progress_bar(zt_maturity["Visibility & Analytics"])} |
| Automation & Orchestration | IR, SI, CA | {get_progress_bar(zt_maturity["Automation & Orchestration"])} |

[↑ Back to Top](#system-security-plan-ssp)

## 3. Assessment Findings

*Note: Only the first 20 assessment findings are displayed in this summary.*
*Note: Only the first 20 assessment findings are displayed in this summary.
Showing {min(20, len(assessments))} of {len(assessments)} findings.*

"""

Expand Down Expand Up @@ -249,7 +281,7 @@ async def generate_poam(
content=csv_content,
media_type="text/csv",
headers={
"Content-Disposition": f'attachment; filename="poam_{system_name.replace(" ","_")}.csv"'
"Content-Disposition": f'attachment; filename="poam_{system_name.replace(" ", "_")}.csv"'
},
)

Expand Down Expand Up @@ -300,13 +332,8 @@ async def get_dashboard(
round(implemented / total_controls * 100, 1) if total_controls else 0
),
"zt_pillars": [
{"pillar": "User", "domains": ["AC", "IA", "PS"]},
{"pillar": "Device", "domains": ["CM", "MA", "PE"]},
{"pillar": "Network", "domains": ["SC", "AC"]},
{"pillar": "Application", "domains": ["CM", "CA", "SI"]},
{"pillar": "Data", "domains": ["MP", "SC", "AU"]},
{"pillar": "Visibility & Analytics", "domains": ["AU", "IR", "RA"]},
{"pillar": "Automation & Orchestration", "domains": ["IR", "SI", "CA"]},
{"pillar": pillar, "domains": domains}
for pillar, domains in ZT_PILLAR_DOMAINS.items()
],
"agents": [
{"name": "orchestrator", "endpoint": "/api/orchestrator"},
Expand Down
8 changes: 7 additions & 1 deletion tests/test_palette_ux.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@

import pytest
from httpx import ASGITransport, AsyncClient
from sqlalchemy.ext.asyncio import AsyncSession

from backend.db.database import AssessmentRecord, Base, engine, init_db
from backend.main import app
Expand Down Expand Up @@ -73,3 +72,10 @@ async def test_ssp_ux_elements():
assert "⭐⭐⭐⭐⭐" in content
# 0.5 confidence should have 3 stars: β­β­β­β˜†β˜† (based on int(0.5 * 5 + 0.5) = 3)
assert "β­β­β­β˜†β˜†" in content

# Palette UX Enhancements check
assert "🚫" in content or "not_implemented" in content
assert "Zero Trust Pillar Alignment" in content
assert "Maturity" in content
assert "[↑ Back to Top](#system-security-plan-ssp)" in content
assert "Showing" in content and "findings" in content
Comment on lines +77 to +81
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟑 Minor

Tighten assertions to validate the actual UX elements, not fallback text.

Line 78 can pass even if 🚫 regresses, because "not_implemented" appears elsewhere in static content. Line 82 is also too broad for the findings summary format.

βœ… More precise assertions
+import re
@@
-        assert "🚫" in content or "not_implemented" in content
+        assert "| Not Implemented | 🚫 " in content
@@
-        assert "Showing" in content and "findings" in content
+        assert re.search(r"Showing\s+\d+\s+of\s+\d+\s+findings", content)

Loading