Skip to content

Commit 047040d

Browse files
authored
fix: replace deprecated @app.on_event('startup') with lifespan context manager in main.py
1 parent f18979f commit 047040d

1 file changed

Lines changed: 36 additions & 24 deletions

File tree

backend/main.py

Lines changed: 36 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
from contextlib import asynccontextmanager
12
from fastapi import FastAPI, Depends, HTTPException
23
from fastapi.middleware.cors import CORSMiddleware
34
from sqlalchemy.orm import Session
@@ -10,36 +11,18 @@
1011

1112
# Add current directory to path so we can import from backend
1213
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
13-
1414
from database import SecurityLog, SessionLocal, engine, init_db
1515
import schemas
1616

17-
# Fix #5: Corrected typo "Thread" -> "Threat"
18-
app = FastAPI(title="AI Security Monitor", description="Real-time Threat Detection API")
19-
20-
# Fix #4: Added Docker service hostname 'frontend' to CORS origins
21-
# so the frontend container can reach the backend inside Docker network
22-
app.add_middleware(
23-
CORSMiddleware,
24-
allow_origins=[
25-
"http://localhost:3000",
26-
"http://frontend:3000",
27-
],
28-
allow_credentials=True,
29-
allow_methods=["*"],
30-
allow_headers=["*"],
31-
)
32-
33-
# Initialize DB
34-
init_db()
35-
3617
# Load Model (Global)
# Module-level globals for the trained anomaly-detection model and its
# categorical encoders. Both start as None and are (re)assigned by
# load_model(), which declares them `global` — endpoints should check for
# None before use.
model = None
encoders = None

# Resolve artifact paths relative to this file so they work regardless of
# the process's current working directory (e.g. inside a Docker container).
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
MODEL_PATH = os.path.join(BASE_DIR, "../ai-model/isolation_forest_model.pkl")
ENCODERS_PATH = os.path.join(BASE_DIR, "../ai-model/encoders.pkl")

25+
4326
def load_model():
4427
global model, encoders
4528
if os.path.exists(MODEL_PATH):
@@ -52,9 +35,35 @@ def load_model():
5235
else:
5336
print("Model file not found. Run: python ai-model/train_model.py")
5437

55-
@app.on_event("startup")
56-
async def startup_event():
38+
39+
# Replaces the deprecated @app.on_event("startup") hook with the
# lifespan-context-manager API recommended by FastAPI.
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Run one-time startup work, then yield control to the running app.

    Loads the anomaly-detection model into module globals and initializes
    the database schema before the server accepts its first request. No
    shutdown cleanup is needed, so nothing follows the yield.
    """
    load_model()
    init_db()
    yield
45+
46+
47+
# Fix #5: Corrected typo "Thread" -> "Threat"
app = FastAPI(
    title="AI Security Monitor",
    description="Real-time Threat Detection API",
    lifespan=lifespan,
)

# Fix #4: Added Docker service hostname 'frontend' to CORS origins
# so the frontend container can reach the backend inside Docker network
_cors_origins = [
    "http://localhost:3000",  # local development frontend
    "http://frontend:3000",   # Docker Compose service hostname
]

app.add_middleware(
    CORSMiddleware,
    allow_origins=_cors_origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
5867

5968
# Dependency
6069
def get_db():
@@ -64,10 +73,12 @@ def get_db():
6473
finally:
6574
db.close()
6675

76+
6777
@app.get("/")
def read_root():
    """Health-check endpoint: confirms the API process is up and serving."""
    status_message = "AI Security Monitor API is running"
    return {"message": status_message}
7080

81+
7182
@app.post("/logs/", response_model=schemas.LogResponse)
7283
def create_log(log: schemas.LogCreate, db: Session = Depends(get_db)):
7384
db_log = SecurityLog(**log.dict())
@@ -76,11 +87,13 @@ def create_log(log: schemas.LogCreate, db: Session = Depends(get_db)):
7687
db.refresh(db_log)
7788
return db_log
7889

90+
7991
@app.get("/logs/", response_model=List[schemas.LogResponse])
def read_logs(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    """Return stored security logs, newest first, paginated by skip/limit."""
    newest_first = db.query(SecurityLog).order_by(SecurityLog.timestamp.desc())
    page = newest_first.offset(skip).limit(limit)
    return page.all()
8395

96+
8497
@app.post("/predict/", response_model=schemas.PredictionResponse)
8598
def predict_anomaly(request: schemas.PredictionRequest):
8699
global model, encoders
@@ -98,10 +111,9 @@ def predict_anomaly(request: schemas.PredictionRequest):
98111
[[request.bytes_transferred, protocol_encoded]],
99112
columns=['bytes', 'protocol_encoded']
100113
)
101-
prediction = model.predict(features)[0] # -1 = anomaly, 1 = normal
114+
prediction = model.predict(features)[0] # -1 = anomaly, 1 = normal
102115
score = float(model.decision_function(features)[0])
103116
is_anomaly = prediction == -1
104-
105117
return {"is_anomaly": is_anomaly, "anomaly_score": score}
106118
except Exception as e:
107119
raise HTTPException(status_code=500, detail=str(e))

0 commit comments

Comments
 (0)