Mirror of https://github.com/PlatypusPus/MushroomEmpire.git (synced 2026-02-07 22:18:59 +00:00)
Cleaned the Structure
api/main.py | 72 | Normal file
@@ -0,0 +1,72 @@
"""
FastAPI Backend for Nordic Privacy AI
Provides endpoints for AI Governance analysis and data cleaning
"""

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
import os

from api.routers import analyze, clean

# Create FastAPI app
app = FastAPI(
    title="Nordic Privacy AI API",
    description="AI-powered GDPR compliance, bias detection, and risk analysis",
    version="1.0.0"
)

# CORS configuration for Next.js frontend
app.add_middleware(
    CORSMiddleware,
    allow_origins=[
        "http://localhost:3000",  # Next.js dev server
        "http://127.0.0.1:3000",
    ],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Mount reports directory for file downloads
reports_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), "reports")
os.makedirs(reports_dir, exist_ok=True)
app.mount("/reports", StaticFiles(directory=reports_dir), name="reports")

# Include routers
app.include_router(analyze.router, prefix="/api", tags=["AI Governance"])
app.include_router(clean.router, prefix="/api", tags=["Data Cleaning"])

@app.get("/")
async def root():
    """Health check endpoint"""
    return {
        "status": "online",
        "service": "Nordic Privacy AI API",
        "version": "1.0.0",
        "endpoints": {
            "analyze": "/api/analyze",
            "clean": "/api/clean",
            "docs": "/docs"
        }
    }

@app.get("/health")
async def health_check():
    """Detailed health check"""
    try:
        import torch
        cuda_available = torch.cuda.is_available()
        gpu_name = torch.cuda.get_device_name(0) if cuda_available else None
    except Exception:  # torch not installed or CUDA query failed
        cuda_available = False
        gpu_name = None

    return {
        "status": "healthy",
        "gpu_acceleration": {
            "available": cuda_available,
            "device": gpu_name or "CPU"
        }
    }
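The router modules imported above (api/routers/analyze.py and api/routers/clean.py) are not part of this diff; main.py only assumes each exposes a module-level APIRouter named router. Below is a minimal, purely illustrative sketch of such a module; the endpoint path, request model, and response fields are assumptions, not the repository's actual code.

# Hypothetical api/routers/analyze.py (illustrative only, not from this commit)
from fastapi import APIRouter
from pydantic import BaseModel

router = APIRouter()

class AnalyzeRequest(BaseModel):
    """Assumed request shape: a free-text payload to analyze."""
    text: str

@router.post("/analyze")
async def analyze_text(request: AnalyzeRequest):
    # Placeholder result; the real analysis logic lives in the actual router module.
    return {"status": "ok", "characters_received": len(request.text)}

Because main.py mounts the router with prefix="/api", a route defined as "/analyze" here would be served at POST /api/analyze, matching the endpoint listed by the root route.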
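To try the service locally, a FastAPI app is typically served with uvicorn. A minimal sketch, assuming uvicorn is installed and the command is run from the repository root; the port and reload flag are just a common development setup, not something specified by this commit.

# Hypothetical run.py at the repository root (illustrative only)
# Shell equivalent: uvicorn api.main:app --reload --port 8000
import uvicorn

if __name__ == "__main__":
    uvicorn.run("api.main:app", host="127.0.0.1", port=8000, reload=True)

Once running, GET / and GET /health return the status payloads defined in main.py, and the interactive API docs are available at /docs.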