diff --git a/ai_governance/report_generator.py b/ai_governance/report_generator.py index 3b41cbd..b56fa02 100644 --- a/ai_governance/report_generator.py +++ b/ai_governance/report_generator.py @@ -117,6 +117,7 @@ class ReportGenerator: privacy = self.risk_results.get('privacy_risks', {}) return { + 'pii_detected': privacy.get('pii_detected', []), # Include full PII detections array 'pii_count': len(privacy.get('pii_detected', [])), 'anonymization_level': privacy.get('anonymization_level', 'UNKNOWN'), 'exposure_risk_count': len(privacy.get('exposure_risks', [])), diff --git a/api/routers/analyze.py b/api/routers/analyze.py index 287934f..288c186 100644 --- a/api/routers/analyze.py +++ b/api/routers/analyze.py @@ -123,10 +123,14 @@ async def analyze_dataset(file: UploadFile = File(...)): }, "risk_assessment": { "overall_risk_score": risk_assessment.get("overall_risk_score", 0), - "privacy_risks": risk_assessment.get("privacy_risks", []), - "ethical_risks": risk_assessment.get("ethical_risks", []), - "compliance_risks": risk_assessment.get("risk_categories", {}).get("compliance_risks", []), - "data_quality_risks": risk_assessment.get("risk_categories", {}).get("data_quality_risks", []) + "risk_level": risk_assessment.get("risk_level", "LOW"), + "presidio_enabled": risk_assessment.get("presidio_enabled", False), + "privacy_risks": risk_assessment.get("privacy_risks", {}), + "ethical_risks": risk_assessment.get("ethical_risks", {}), + "compliance_risks": risk_assessment.get("compliance_risks", {}), + "risk_categories": risk_assessment.get("risk_categories", {}), + "violations": risk_assessment.get("violations", []), + "insights": risk_assessment.get("insights", []) }, "recommendations": report.get("recommendations", []), "report_file": f"/{report_path}", diff --git a/api/routers/chatbot.py b/api/routers/chatbot.py index 347fbea..88943eb 100644 --- a/api/routers/chatbot.py +++ b/api/routers/chatbot.py @@ -1,7 +1,9 @@ import ollama import chromadb from pypdf import 
PdfReader -from fastapi import FastAPI +from fastapi import FastAPI, HTTPException +from fastapi.middleware.cors import CORSMiddleware +from pydantic import BaseModel import uvicorn from fastapi.middleware.cors import CORSMiddleware @@ -34,11 +36,24 @@ for i, chunk in enumerate(chunks): print("Embeddings done!") +# Allow browser calls from the frontend +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=False, + allow_methods=["*"], + allow_headers=["*"], +) + +class ChatRequest(BaseModel): + prompt: str + @app.post("/chat") -async def chat_bot(prompt: str): - if not prompt: - return - query = prompt +async def chat_bot(prompt: str | None = None, body: ChatRequest | None = None): + # Accept prompt from either query (?prompt=) or JSON body {"prompt": "..."} + query = prompt or (body.prompt if body else None) + if not query: + raise HTTPException(status_code=400, detail="Missing prompt") response = ollama.embed(model="nomic-embed-text", input=query) query_embedding = response["embeddings"][0] diff --git a/frontend/app/api/chat/route.ts b/frontend/app/api/chat/route.ts new file mode 100644 index 0000000..13fa318 --- /dev/null +++ b/frontend/app/api/chat/route.ts @@ -0,0 +1,38 @@ +import { NextResponse } from 'next/server'; + +const CHAT_HOST = process.env.CHAT_API_URL || process.env.NEXT_PUBLIC_CHAT_API_URL || 'http://localhost:8000'; + +export async function POST(req: Request) { + try { + const body = await req.json().catch(() => ({})); + const prompt = typeof body?.prompt === 'string' ?
body.prompt : ''; + if (!prompt.trim()) { + return NextResponse.json({ detail: 'Missing prompt' }, { status: 400 }); + } + + const controller = new AbortController(); + const timeout = setTimeout(() => controller.abort(), 120_000); + try { + const upstream = await fetch(`${CHAT_HOST}/chat`, { + method: 'POST', + headers: { 'Content-Type': 'application/json', 'Accept': 'application/json' }, + body: JSON.stringify({ prompt }), + signal: controller.signal, + }); + + const text = await upstream.text(); + let json: any; + try { json = JSON.parse(text); } catch { json = { response: text }; } + + if (!upstream.ok) { + return NextResponse.json(json || { detail: 'Chat failed' }, { status: upstream.status }); + } + return NextResponse.json(json); + } finally { + clearTimeout(timeout); + } + } catch (err: any) { + const msg = err?.name === 'AbortError' ? 'Request timed out – model may be overloaded.' : (err?.message || 'Unexpected error'); + return NextResponse.json({ detail: msg }, { status: 500 }); + } +} diff --git a/frontend/components/Navbar.tsx b/frontend/components/Navbar.tsx index 59d2833..1b6d701 100644 --- a/frontend/components/Navbar.tsx +++ b/frontend/components/Navbar.tsx @@ -1,23 +1,15 @@ "use client"; import Link from 'next/link'; import { usePathname } from 'next/navigation'; -import { useEffect, useState } from 'react'; +import { useState } from 'react'; export function Navbar() { const pathname = usePathname(); const onTry = pathname?.startsWith('/try'); - const [scrolled, setScrolled] = useState(false); const [menuOpen, setMenuOpen] = useState(false); - useEffect(() => { - const onScroll = () => setScrolled(window.scrollY > 4); - onScroll(); - window.addEventListener('scroll', onScroll, { passive: true }); - return () => window.removeEventListener('scroll', onScroll); - }, []); - return ( -