diff --git a/frontend/app/try/page.tsx b/frontend/app/try/page.tsx
index bf2e24c..8479dc1 100644
--- a/frontend/app/try/page.tsx
+++ b/frontend/app/try/page.tsx
@@ -9,13 +9,13 @@ export default function TryPage() {
   const [tab, setTab] = useState("processing");
 
   return (
-
+
-
+
-
+
       setTab("bias-analysis")} />
-
+
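For orientation, here is a minimal sketch of the tab wiring this hunk touches. The JSX markup was lost from this diff, so the element structure, import paths, and Sidebar prop names below are assumptions; only TryTab, the "processing" initial tab, and the onAnalyze={() => setTab("bias-analysis")} callback come from the diff itself.

// Hypothetical reconstruction of the TryPage wiring (markup and prop names assumed).
"use client";
import { useState } from "react";
import { Sidebar, TryTab } from "@/components/try/Sidebar";   // import path assumed
import { CenterPanel } from "@/components/try/CenterPanel";   // import path assumed

export default function TryPage() {
  // The active tab drives which panel CenterPanel renders; CenterPanel can advance it itself.
  const [tab, setTab] = useState<TryTab>("processing");
  return (
    <div>
      <Sidebar tab={tab} onSelect={setTab} />  {/* prop names assumed */}
      <CenterPanel tab={tab} onAnalyze={() => setTab("bias-analysis")} />
    </div>
  );
}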
diff --git a/frontend/components/try/CenterPanel_BACKUP.tsx b/frontend/components/try/CenterPanel_BACKUP.tsx
deleted file mode 100644
index 3c587b2..0000000
--- a/frontend/components/try/CenterPanel_BACKUP.tsx
+++ /dev/null
@@ -1,383 +0,0 @@
-"use client";
-import { TryTab } from "./Sidebar";
-import { useState, useRef, useCallback, useEffect } from "react";
-
-interface CenterPanelProps {
-  tab: TryTab;
-  onAnalyze?: () => void;
-}
-
-interface UploadedFileMeta {
-  name: string;
-  size: number;
-  type: string;
-  contentPreview: string;
-}
-
-interface TablePreviewData {
-  headers: string[];
-  rows: string[][];
-  origin: 'csv';
-}
-
-export function CenterPanel({ tab, onAnalyze }: CenterPanelProps) {
-  const PREVIEW_BYTES = 64 * 1024; // read first 64KB slice for large-file preview
-  const [fileMeta, setFileMeta] = useState<UploadedFileMeta | null>(null);
-  const [isDragging, setIsDragging] = useState(false);
-  const [progress, setProgress] = useState(0);
-  const [progressLabel, setProgressLabel] = useState("Processing");
-  const [tablePreview, setTablePreview] = useState<TablePreviewData | null>(null);
-  const inputRef = useRef<HTMLInputElement | null>(null);
-  const [loadedFromCache, setLoadedFromCache] = useState(false);
-
-  const reset = () => {
-    setFileMeta(null);
-    setProgress(0);
-    setProgressLabel("Processing");
-    setTablePreview(null);
-  };
-
-  function tryParseCSV(text: string, maxRows = 50, maxCols = 40): TablePreviewData | null {
-    const lines = text.split(/\r?\n/).filter(l => l.trim().length > 0);
-    if (lines.length < 2) return null;
-    const commaDensity = lines.slice(0, 10).filter(l => l.includes(',')).length;
-    if (commaDensity < 2) return null;
-    const parseLine = (line: string) => {
-      const out: string[] = [];
-      let cur = '';
-      let inQuotes = false;
-      for (let i = 0; i < line.length; i++) {
-        const ch = line[i];
-        if (ch === '"') {
-          if (inQuotes && line[i + 1] === '"') { cur += '"'; i++; } else { inQuotes = !inQuotes; }
-        } else if (ch === ',' && !inQuotes) {
-          out.push(cur);
-          cur = '';
-        } else { cur += ch; }
-      }
-      out.push(cur);
-      return out.map(c => c.trim());
-    };
-    const raw = lines.slice(0, maxRows).map(parseLine);
-    if (raw.length === 0) return null;
-    const headers = raw[0];
-    const colCount = Math.min(headers.length, maxCols);
-    const rows = raw.slice(1).map(r => r.slice(0, colCount));
-    return { headers: headers.slice(0, colCount), rows, origin: 'csv' };
-  }
-
-  // We no longer build table preview for JSON; revert JSON to raw text view.
-
-  const processFile = useCallback(async (f: File) => {
-    if (!f) return;
-    const isCSV = /\.csv$/i.test(f.name);
-    setProgress(0);
-    // For large files, show a progress bar while reading the file stream (no preview)
-    if (f.size > 1024 * 1024) {
-      setProgressLabel("Uploading");
-      const metaObj: UploadedFileMeta = {
-        name: f.name,
-        size: f.size,
-        type: f.type || "unknown",
-        contentPreview: `Loading partial preview (first ${Math.round(PREVIEW_BYTES/1024)}KB)...`,
-      };
-      setFileMeta(metaObj);
-      setTablePreview(null);
-      // Save to IndexedDB immediately so it persists without needing full read
-      (async () => {
-        try { await saveLatestUpload(f, metaObj); } catch {}
-      })();
-      // Read head slice for partial preview & possible CSV table extraction
-      try {
-        const headBlob = f.slice(0, PREVIEW_BYTES);
-        const headReader = new FileReader();
-        headReader.onload = async () => {
-          try {
-            const buf = headReader.result as ArrayBuffer;
-            const decoder = new TextDecoder();
-            const text = decoder.decode(buf);
-            setFileMeta(prev => prev ? { ...prev, contentPreview: text.slice(0, 4000) } : prev);
-            if (isCSV) {
-              const parsed = tryParseCSV(text);
-              setTablePreview(parsed);
-            } else {
-              setTablePreview(null);
-            }
-            try { await saveLatestUpload(f, { ...metaObj, contentPreview: text.slice(0, 4000) }); } catch {}
-          } catch { /* ignore */ }
-        };
-        headReader.readAsArrayBuffer(headBlob);
-      } catch { /* ignore */ }
-      // Use streaming read for progress without buffering entire file in memory
-      try {
-        const stream: ReadableStream<Uint8Array> | undefined = (typeof (f as any).stream === "function" ? (f as any).stream() : undefined);
-        if (stream && typeof stream.getReader === "function") {
-          const reader = stream.getReader();
-          let loaded = 0;
-          const total = f.size || 1;
-          for (;;) {
-            const { done, value } = await reader.read();
-            if (done) break;
-            loaded += value ? value.length : 0;
-            const pct = Math.min(100, Math.round((loaded / total) * 100));
-            setProgress(pct);
-          }
-          setProgress(100);
-        } else {
-          // Fallback to FileReader progress events
-          const reader = new FileReader();
-          reader.onprogress = (evt) => {
-            if (evt.lengthComputable) {
-              const pct = Math.min(100, Math.round((evt.loaded / evt.total) * 100));
-              setProgress(pct);
-            } else {
-              setProgress((p) => (p < 90 ? p + 5 : p));
-            }
-          };
-          reader.onloadend = () => setProgress(100);
-          reader.onerror = () => setProgress(0);
-          reader.readAsArrayBuffer(f);
-        }
-      } catch {
-        setProgress(100);
-      }
-      return;
-    }
-    const reader = new FileReader();
-    reader.onprogress = (evt) => {
-      if (evt.lengthComputable) {
-        const pct = Math.min(100, Math.round((evt.loaded / evt.total) * 100));
-        setProgress(pct);
-      } else {
-        setProgress((p) => (p < 90 ? p + 5 : p));
-      }
-    };
-    reader.onload = async () => {
-      try {
-        const buf = reader.result as ArrayBuffer;
-        const decoder = new TextDecoder();
-        const text = decoder.decode(buf);
-        const metaObj: UploadedFileMeta = {
-          name: f.name,
-          size: f.size,
-          type: f.type || "unknown",
-          contentPreview: text.slice(0, 4000),
-        };
-        setFileMeta(metaObj);
-        if (isCSV) {
-          const parsed = tryParseCSV(text);
-          setTablePreview(parsed);
-        } else {
-          setTablePreview(null);
-        }
-        // Save file blob and meta to browser cache (IndexedDB)
-        try {
-          await saveLatestUpload(f, metaObj);
-        } catch {}
-        setProgressLabel("Processing");
-        setProgress(100);
-      } catch (e) {
-        const metaObj: UploadedFileMeta = {
-          name: f.name,
-          size: f.size,
-          type: f.type || "unknown",
-          contentPreview: "Unable to decode preview.",
-        };
-        setFileMeta(metaObj);
-        setTablePreview(null);
-        try {
-          await saveLatestUpload(f, metaObj);
-        } catch {}
-        setProgressLabel("Processing");
-        setProgress(100);
-      }
-    };
-    reader.onerror = () => {
-      setProgress(0);
-    };
-    reader.readAsArrayBuffer(f);
-  }, []);
-
-  function handleFileChange(e: React.ChangeEvent<HTMLInputElement>) {
-    const f = e.target.files?.[0];
-    processFile(f as File);
-  }
-
-  const onDragOver = (e: React.DragEvent) => {
-    e.preventDefault();
-    setIsDragging(true);
-  };
-  const onDragLeave = () => setIsDragging(false);
-  const onDrop = (e: React.DragEvent) => {
-    e.preventDefault();
-    setIsDragging(false);
-    const f = e.dataTransfer.files?.[0];
-    processFile(f as File);
-  };
-
-  // Load last cached upload on mount (processing tab only)
-  useEffect(() => {
-    let ignore = false;
-    if (tab !== "processing") return;
-    (async () => {
-      try {
-        const { meta } = await getLatestUpload();
-        if (!ignore && meta) {
-          setFileMeta(meta as UploadedFileMeta);
-          setLoadedFromCache(true);
-        }
-      } catch {}
-    })();
-    return () => {
-      ignore = true;
-    };
-  }, [tab]);
-
-  function renderTabContent() {
-    switch (tab) {
-      case "processing":
-        return (
-          Upload & Process Data
-          Upload a CSV / JSON / text file. We will later parse, detect PII, and queue analyses.
-          Drag & drop a CSV / JSON / TXT here, or click to browse.
-          {progress > 0 && (
-            {progressLabel} {progress}%
-          )}
-          {fileMeta && (
-            {fileMeta.name}
-            {Math.round(fileMeta.size / 1024)} KB
-            {loadedFromCache && (
-              Loaded from browser cache
-            )}
-            {fileMeta.type || "Unknown type"}
-            {/* Table preview when structured data detected; otherwise show text */}
-            {tablePreview && tablePreview.origin === 'csv' ? (
-              {tablePreview.headers.map((h, idx) => (
-                {h}
-              ))}
-              {tablePreview.rows.map((r, i) => (
-                {r.map((c, j) => (
-                  {c}
-                ))}
-              ))}
-            ) : (
-              {fileMeta.contentPreview || "(no preview)"}
-            )}
-          )}
-        );
-      case "bias-analysis":
-        return (
-          Bias Analysis (Placeholder)
-          Once processing completes, bias metrics will appear here (distribution, representation, fairness indicators).
-        );
-      case "risk-analysis":
-        return (
-          Risk Analysis (Placeholder)
-          Potential privacy exposure, sensitive attribute concentration, consent gaps will be displayed.
-        );
-      case "bias-risk-mitigation":
-        return (
-          Mitigation Suggestions (Placeholder)
-          Recommended transformations, anonymization strategies, sampling adjustments, consent workflows.
-        );
-      case "results":
-        return (
-          Results Summary (Placeholder)
-          Aggregated findings and downloadable compliance report will appear here.
-        );
-      default:
-        return null;
-    }
-  }
-
-  return (
-    {renderTabContent()}
-  );
-}
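The deleted component above persists uploads through saveLatestUpload and getLatestUpload, which are neither shown nor imported in this diff. As a rough idea of what such helpers could look like, here is a hedged sketch against the standard indexedDB browser API; the database and store names, the single "latest" key, and the { blob, meta } record shape are assumptions, while the call shapes (saveLatestUpload(file, meta), getLatestUpload() resolving to an object with a meta field) come from the component itself.

// Hypothetical sketch of the upload-cache helpers referenced by CenterPanel_BACKUP.tsx.
const DB_NAME = "try-upload-cache"; // assumed
const STORE = "latestUpload";       // assumed

function openDB(): Promise<IDBDatabase> {
  return new Promise((resolve, reject) => {
    const req = indexedDB.open(DB_NAME, 1);
    req.onupgradeneeded = () => req.result.createObjectStore(STORE);
    req.onsuccess = () => resolve(req.result);
    req.onerror = () => reject(req.error);
  });
}

export async function saveLatestUpload(file: Blob, meta: unknown): Promise<void> {
  const db = await openDB();
  await new Promise<void>((resolve, reject) => {
    const tx = db.transaction(STORE, "readwrite");
    // Single fixed key: only the most recent upload is kept.
    tx.objectStore(STORE).put({ blob: file, meta }, "latest");
    tx.oncomplete = () => resolve();
    tx.onerror = () => reject(tx.error);
  });
}

export async function getLatestUpload(): Promise<{ blob?: Blob; meta?: unknown }> {
  const db = await openDB();
  return new Promise((resolve, reject) => {
    const req = db.transaction(STORE, "readonly").objectStore(STORE).get("latest");
    req.onsuccess = () => resolve(req.result ?? {});
    req.onerror = () => reject(req.error);
  });
}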
diff --git a/frontend/components/try/ChatbotPanel.tsx b/frontend/components/try/ChatbotPanel.tsx
index ebff748..8309601 100644
--- a/frontend/components/try/ChatbotPanel.tsx
+++ b/frontend/components/try/ChatbotPanel.tsx
@@ -73,12 +73,12 @@ export function ChatbotPanel() {
     }
   }
-  return (
-
+  return (
+

Privacy Copilot

-
+
{messages.map((m, i) => (
Workflow
-