from sklearn.metrics import confusion_matrix, classification_report, roc_auc_score
from sklearn.model_selection import cross_val_score

# Evaluation summary for a trained classifier.
# Assumes the following are already defined by earlier code:
#   model  — a fitted estimator exposing predict_proba
#   X, y   — full feature matrix / labels (used for cross-validation)
#   X_test, y_test, y_pred — held-out features, true labels, hard predictions
print("=== Confusion Matrix ===")
print(confusion_matrix(y_test, y_pred))

print("=== Classification Report ===")
print(classification_report(y_test, y_pred))

# ROC-AUC requires continuous scores, not hard predictions. Column 1 of
# predict_proba is the positive-class probability — this assumes a BINARY
# classifier; for multiclass, roc_auc_score needs the full probability
# matrix plus multi_class='ovr'/'ovo'.
y_prob = model.predict_proba(X_test)[:, 1]
auc = roc_auc_score(y_test, y_prob)
print(f"ROC-AUC: {auc:.3f}")

# 5-fold cross-validated F1: the mean gives a more reliable estimate than a
# single split, and the std quantifies variability across folds.
# NOTE(review): scoring='f1' is the binary-average F1 — consistent with the
# binary assumption above; use 'f1_macro'/'f1_weighted' for multiclass.
cv_scores = cross_val_score(model, X, y, cv=5, scoring='f1')
print(f"CV F1: {cv_scores.mean():.3f} +/- {cv_scores.std():.3f}")