diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 0ef42e49..923620d1 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -44,14 +44,15 @@ jobs: which pytest python -m pytest --maxfail=2 env: - TEST_EMAIL_ACCOUNT: ${{ secrets.TEST_EMAIL_ACCOUNT }} - TEST_GOOGLE_CLIENT_ID: ${{ secrets.TEST_GOOGLE_CLIENT_ID }} - TEST_GOOGLE_CLIENT_SECRET: ${{ secrets.TEST_GOOGLE_CLIENT_SECRET }} - TEST_MONGO_URI: ${{ secrets.TEST_MONGO_URI }} - TEST_OPENAI_API_KEY: ${{ secrets.TEST_OPENAI_API_KEY }} - TEST_DEEPSEEK_API_KEY: ${{ secrets.TEST_DEEPSEEK_API_KEY }} - TEST_GOOGLE_API_KEY: ${{ secrets.TEST_GEMINI_API_KEY }} - TEST_SUMMARIZER_PROVIDER: ${{ secrets.TEST_SUMMARIZER_PROVIDER || 'openai' }} + EMAIL_ACCOUNT: ${{ secrets.TEST_EMAIL_ACCOUNT }} + GOOGLE_CLIENT_ID: ${{ secrets.TEST_GOOGLE_CLIENT_ID }} + GOOGLE_CLIENT_SECRET: ${{ secrets.TEST_GOOGLE_CLIENT_SECRET }} + MONGO_URI: ${{ secrets.TEST_MONGO_URI }} + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + OPENROUTER_API_KEY: ${{ secrets.OPENROUTER_API_KEY }} + DEEPSEEK_API_KEY: ${{ secrets.DEEPSEEK_API_KEY }} + GOOGLE_API_KEY: ${{ secrets.GEMINI_API_KEY }} + SUMMARIZER_PROVIDER: ${{ secrets.SUMMARIZER_PROVIDER || 'openrouter' }} # Add a placeholder job that will always run when tests are disabled test-skipped: diff --git a/README.md b/README.md index 9f3a4cfb..8e27d509 100644 --- a/README.md +++ b/README.md @@ -18,28 +18,28 @@ EmailEssence is a sophisticated email management solution that leverages artific ### Feature Complete (FC) Features - 🎨 Customizable dashboard with modular components - 🔍 Advanced keyword analysis and topic identification -- 💻 Cross-platform desktop support via Electron - 🔄 Incremental email fetching for large inboxes - 🎯 Smart email prioritization - 🛠️ Enhanced user preferences and settings +### Future Features +- 💻 Cross-platform desktop support via Electron + ## Technical Stack ### Frontend - React - Modern UI framework -- Remix 
- Full-stack web framework -- Electron - Desktop application framework -- JavaScript - Type-safe development +- Vite - Frontend tooling and build server +- JavaScript - Core language for the frontend ### Backend - Python - Core backend services - FastAPI - High-performance API framework - MongoDB - Flexible document database - Redis - High-performance caching -- OpenRouter - AI-powered email processing +- Flexible AI provider support (OpenAI, Google, OpenRouter) ### Infrastructure -- Express.js - Web server and middleware - OAuth 2.0 - Secure authentication - IMAP - Email protocol support diff --git a/backend/.env.example b/backend/.env.example index b1fbbe48..2459087c 100644 --- a/backend/.env.example +++ b/backend/.env.example @@ -9,9 +9,10 @@ GOOGLE_CLIENT_SECRET=google_client_secret MONGO_URI=mongodb+srv://:
@emailsummarization.1coye.mongodb.net/?retryWrites=true&w=majority&appName=EmailSummarization
# API keys
-OPENAI_API_KEY=openai_api_key
+OPENROUTER_API_KEY=openrouter_api_key
+#OPENAI_API_KEY=openai_api_key
#DEEPSEEK_API_KEY=deepseek_api_key
-GOOGLE_API_KEY=google_api_key
+#GOOGLE_API_KEY=google_api_key
# Summarizer Settings
# SUMMARIZER_PROVIDER=openrouter
diff --git a/backend/README.md b/backend/README.md
index 2751cd23..175c5d70 100644
--- a/backend/README.md
+++ b/backend/README.md
@@ -23,9 +23,9 @@ For local development without Docker, use one of the provided setup scripts:
#### On Windows:
-```bash
+```ps1
# Run the setup script to create a virtual environment and install dependencies
-.\setup.bat
+.\setup.ps1
```
These scripts will:
@@ -134,7 +134,7 @@ For Render deployments, environment variables are configured through the Render
- `google_client_secret`
- `email_account`
- `mongo_uri`
- - `openai_api_key`
+ - `openrouter_api_key`
- Any optional variables you wish to override
This separates your development environment configuration from your production deployment, following security best practices.
@@ -152,9 +152,6 @@ For CI/CD environments, use the CI setup scripts:
```bash
# Unix/Linux/macOS
./setup-ci.sh
-
-# Windows
-.\setup-ci.bat
```
## Troubleshooting
diff --git a/backend/app/dependencies.py b/backend/app/dependencies.py
new file mode 100644
index 00000000..f79f2204
--- /dev/null
+++ b/backend/app/dependencies.py
@@ -0,0 +1,130 @@
+"""
+Common dependencies for Email Essence FastAPI application.
+
+This module centralizes shared dependencies including authentication schemes,
+logging configuration, and common helper functions used across routers and services.
+"""
+
+import logging
+from typing import Dict, Any
+
+from fastapi import Depends, HTTPException, status
+from fastapi.security import OAuth2PasswordBearer
+
+# Internal imports
+from app.models.user_models import UserSchema
+from app.services.auth_service import AuthService
+from app.services.user_service import UserService
+from app.services.database.factories import get_auth_service, get_user_service
+from app.utils.helpers import get_logger, configure_module_logging, standardize_error_response, log_operation
+
+# -------------------------------------------------------------------------
+# Authentication Dependencies
+# -------------------------------------------------------------------------
+
+# Centralized OAuth2 scheme
+oauth2_scheme = OAuth2PasswordBearer(
+ tokenUrl="/auth/token",
+ description="Enter the token you received from the login flow (without Bearer prefix)"
+)
+
+async def get_current_user_email(
+ token: str = Depends(oauth2_scheme),
+ auth_service: AuthService = Depends(get_auth_service)
+) -> str:
+ """
+ Dependency to extract user email from valid token.
+
+ Args:
+ token: JWT token from OAuth2 authentication
+ auth_service: Auth service instance
+
+ Returns:
+ str: User's email address
+
+ Raises:
+ HTTPException: 401 error if token is invalid
+ """
+ try:
+ user_data = await auth_service.get_credentials_from_token(token)
+ return user_data['email']
+ except Exception as e:
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail="Invalid authentication credentials",
+ headers={"WWW-Authenticate": "Bearer"},
+ )
+
+async def get_current_user_info(
+ token: str = Depends(oauth2_scheme),
+ auth_service: AuthService = Depends(get_auth_service)
+) -> Dict[str, Any]:
+ """
+ Validates token and returns user information.
+
+ Args:
+ token: JWT token from OAuth2 authentication
+ auth_service: Auth service instance
+
+ Returns:
+ dict: User information and credentials
+
+ Raises:
+ HTTPException: 401 error if token is invalid
+ """
+ try:
+ user_data = await auth_service.get_credentials_from_token(token)
+ return user_data
+ except Exception as e:
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail=f"Invalid authentication: {str(e)}"
+ )
+
+async def get_current_user(
+ user_data: Dict[str, Any] = Depends(get_current_user_info),
+ user_service: UserService = Depends(get_user_service)
+) -> UserSchema:
+ """
+ Retrieve user details or create user if they don't exist.
+
+ Args:
+ user_data: User information and credentials from token validation
+ user_service: User service instance
+
+ Returns:
+ UserSchema: User profile information
+
+ Raises:
+ HTTPException: If user retrieval fails
+ """
+ try:
+ user_info = user_data['user_info']
+ user_email = user_info.get('email')
+ google_id = user_info.get('google_id')
+
+ # Try to get existing user
+ user = await user_service.get_user_by_email(user_email)
+
+ # If user doesn't exist, create new user
+ if not user:
+ user = await user_service.create_user({
+ "email": user_email,
+ "name": user_info.get("name", ""),
+ "picture": user_info.get("picture", ""),
+ "google_id": google_id
+ })
+ else:
+ # Update google_id if it's missing
+ user_dict = user.model_dump()
+ if not user_dict.get('google_id'):
+ await user_service.update_user(user_dict['_id'], {"google_id": google_id})
+
+ return user
+ except Exception as e:
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Failed to retrieve user: {str(e)}"
+ )
+
+
\ No newline at end of file
diff --git a/backend/app/routers/auth_router.py b/backend/app/routers/auth_router.py
index 47fac4b9..9ba23d40 100644
--- a/backend/app/routers/auth_router.py
+++ b/backend/app/routers/auth_router.py
@@ -6,74 +6,47 @@
via OAuth2 to retrieve and process email data.
"""
+# Standard library imports
+import base64
import json
import urllib.parse
-import base64
import uuid
-from typing import Dict, Optional, Any
-from functools import lru_cache
-from fastapi import APIRouter, HTTPException, status, Depends, Request, Query, Form
-from fastapi.security import OAuth2AuthorizationCodeBearer, OAuth2PasswordBearer
-from fastapi.responses import RedirectResponse, HTMLResponse, JSONResponse
+# Third-party imports
+from fastapi import APIRouter, Depends, Form, HTTPException, Query, status
+from fastapi.responses import HTMLResponse, RedirectResponse
from google.auth.transport.requests import Request as GoogleRequest
from google.oauth2.credentials import Credentials
-from starlette.concurrency import run_in_threadpool
-from pydantic import BaseModel, EmailStr
-from google_auth_oauthlib.flow import Flow
from googleapiclient.discovery import build
+from starlette.concurrency import run_in_threadpool
-from app.services.auth_service import AuthService, SCOPES
+# Internal imports
+from app.dependencies import get_current_user_email, oauth2_scheme
+from app.utils.helpers import get_logger, log_operation, standardize_error_response
+from app.models import (
+ AuthStatusResponse,
+ ExchangeCodeRequest,
+ RefreshTokenRequest,
+ TokenData,
+ TokenResponse,
+ VerifyTokenRequest,
+)
+from app.services.auth_service import SCOPES, AuthService
+from app.services.database.factories import get_auth_service, get_user_service
from app.services.user_service import UserService
from app.utils.config import get_settings
-from app.services.database.factories import get_auth_service, get_user_service
-from app.models import TokenData, TokenResponse, AuthStatusResponse, ExchangeCodeRequest, RefreshTokenRequest, VerifyTokenRequest
+
+# -------------------------------------------------------------------------
+# Router Configuration
+# -------------------------------------------------------------------------
router = APIRouter()
settings = get_settings()
+logger = get_logger(__name__, 'router')
-# -- Authentication Schemes --
-
-# This is a simpler authentication scheme for Swagger UI
-# It only shows a token field without client_id/client_secret
-oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/auth/token", description="Enter the token you received from the login flow (without Bearer prefix)")
-
-# -- Authentication Utility --
-
-async def get_current_user_email(
- token: str = Depends(oauth2_scheme),
- auth_service: AuthService = Depends(get_auth_service)
-):
- """
- Dependency to extract user email from valid token.
- Will raise 401 automatically if token is invalid.
-
- Args:
- token: JWT token from OAuth2 authentication
- auth_service: Auth service instance
-
- Returns:
- str: User's email address
-
- Raises:
- HTTPException: 401 error if token is invalid
- """
- try:
- # Get user info from token
- user_data = await auth_service.get_credentials_from_token(token)
- return user_data['email']
- except Exception as e:
- raise HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail="Invalid authentication credentials",
- headers={"WWW-Authenticate": "Bearer"},
- )
-
-# -- Endpoints --
-
-# Debugging helper function
-def debug(message: str):
- print(f"[DEBUG] {message}")
+# -------------------------------------------------------------------------
+# Endpoints
+# -------------------------------------------------------------------------
@router.get(
"/login",
@@ -99,7 +72,7 @@ async def login(
Returns:
RedirectResponse: Redirects to Google's authentication page
"""
- debug(f"Login initiated - Redirect URI: {redirect_uri}")
+ log_operation(logger, 'debug', f"Login initiated - Redirect URI: {redirect_uri}")
try:
# Create a state object that includes the frontend redirect URI
@@ -115,17 +88,13 @@ async def login(
result = auth_service.create_authorization_url(encoded_custom_state)
authorization_url = result["authorization_url"]
- debug(f"Generated Google OAuth URL: {authorization_url}")
+ log_operation(logger, 'debug', f"Generated Google OAuth URL: {authorization_url}")
# Now redirect to the correct URL
return RedirectResponse(authorization_url)
except Exception as e:
- debug(f"[ERROR] Login failed: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Failed to create authorization URL: {str(e)}"
- )
+ raise standardize_error_response(e, "login")
@router.get("/callback")
async def callback(
@@ -146,7 +115,7 @@ async def callback(
Returns:
RedirectResponse: Redirects to frontend with authentication state
"""
- debug(f"Received callback with code: {code}")
+ log_operation(logger, 'debug', f"Received callback with code: {code}")
try:
if not state:
@@ -156,13 +125,13 @@ async def callback(
decoded_state = json.loads(base64.urlsafe_b64decode(state).decode())
frontend_url = decoded_state.get("redirect_uri")
- debug(f"Decoded state - Redirecting to frontend: {frontend_url}")
+ log_operation(logger, 'debug', f"Decoded state - Redirecting to frontend: {frontend_url}")
if not frontend_url:
raise ValueError("Missing redirect URI in state parameter")
# Exchange code for tokens and get user info in one step
- debug("Exchanging code for tokens and getting user info...")
+ log_operation(logger, 'debug', "Exchanging code for tokens and getting user info...")
token_data = await auth_service.get_tokens_from_code(code, None) # First exchange
# Get user info using the token
@@ -183,7 +152,7 @@ async def callback(
)
user_email = user_info.get('email')
- debug(f"User email retrieved: {user_email}")
+ log_operation(logger, 'debug', f"User email retrieved: {user_email}")
if not user_email:
raise ValueError("Could not retrieve user email from Google")
@@ -191,7 +160,7 @@ async def callback(
# Check if user exists, create if not
user = await user_service.get_user_by_email(user_email)
if not user:
- debug(f"Creating new user: {user_email}")
+ log_operation(logger, 'info', f"Creating new user: {user_email}")
user = await user_service.create_user({
"email": user_email,
"name": user_info.get("name", ""),
@@ -199,7 +168,7 @@ async def callback(
"google_id": user_info.get("id")
})
else:
- debug(f"Found existing user: {user_email}")
+ log_operation(logger, 'info', f"Found existing user: {user_email}")
# Special handling for Swagger UI testing
if "localhost:8000/docs" in frontend_url or "/docs" in frontend_url:
@@ -239,7 +208,7 @@ async def exchange_code(
Requires the user's email to associate the tokens.
"""
- debug(f"Exchanging OAuth code for user: {request.user_email}")
+ log_operation(logger, 'info', f"Exchanging OAuth code for user: {request.user_email}")
try:
if not request.code or not request.user_email:
raise HTTPException(
@@ -250,7 +219,7 @@ async def exchange_code(
# Exchange auth code for tokens and store them in MongoDB
tokens = await auth_service.get_tokens_from_code(request.code, request.user_email)
- debug(f"Token exchange successful for {request.user_email}")
+ log_operation(logger, 'debug', f"Token exchange successful for {request.user_email}")
return TokenResponse(
access_token=tokens.token,
token_type="bearer",
@@ -259,11 +228,8 @@ async def exchange_code(
)
except Exception as e:
- debug(f"[ERROR] Code exchange failed: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail=f"Failed to exchange code for tokens: {str(e)}"
- )
+ log_operation(logger, 'error', f"Code exchange failed: {str(e)}")
+ raise standardize_error_response(e, "exchange code")
@router.get("/token", response_model=TokenResponse)
async def get_token(
@@ -283,10 +249,7 @@ async def get_token(
token_type="bearer"
)
except Exception as e:
- raise HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail=f"Token retrieval failed: {str(e)}"
- )
+ raise standardize_error_response(e, "get token")
@router.post("/refresh", response_model=TokenResponse)
async def refresh_token(
@@ -313,10 +276,7 @@ async def refresh_token(
token_type="bearer"
)
except Exception as e:
- raise HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail=f"Token refresh failed: {str(e)}"
- )
+ raise standardize_error_response(e, "refresh token")
@router.get("/status", response_model=AuthStatusResponse)
async def auth_status(
@@ -331,14 +291,14 @@ async def auth_status(
try:
# Extract user info from the token
user_data = await auth_service.get_credentials_from_token(token)
- user_email = user_data['google_id']
+ user_google_id = user_data['google_id']
- debug(f"User google_id extracted from token: {google_id}")
+ log_operation(logger, 'debug', f"User google_id extracted from token: {user_google_id}")
# Get detailed credentials from the database using that email
try:
# Get the token record directly from the database instead of using get_credentials
- token_record = await auth_service.get_token_record(google_id)
+ token_record = await auth_service.get_token_record(user_google_id)
if not token_record:
return AuthStatusResponse(
@@ -364,7 +324,7 @@ async def auth_status(
except Exception as e:
# Token validation failed
- debug(f"[ERROR] Auth status check failed: {str(e)}")
+ log_operation(logger, 'error', f"Auth status check failed: {str(e)}")
return AuthStatusResponse(
is_authenticated=False,
token_valid=False,
@@ -587,7 +547,4 @@ async def token_endpoint(
"token_type": "bearer"
}
except Exception as e:
- raise HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail=f"Failed to store token: {str(e)}"
- )
\ No newline at end of file
+ raise standardize_error_response(e, "token endpoint")
\ No newline at end of file
diff --git a/backend/app/routers/emails_router.py b/backend/app/routers/emails_router.py
index 30ffdc36..91b89949 100644
--- a/backend/app/routers/emails_router.py
+++ b/backend/app/routers/emails_router.py
@@ -6,53 +6,30 @@
It provides a set of REST endpoints for interacting with the user's email data.
"""
-from fastapi import APIRouter, HTTPException, Query, Depends, status
-from fastapi.security import OAuth2PasswordBearer
-from typing import List, Optional
-from pydantic import BaseModel
-import logging
-from functools import lru_cache
+# Standard library imports
+from typing import Optional
-from app.models.email_models import EmailSchema, EmailResponse, ReaderViewResponse
+# Third-party imports
+from fastapi import APIRouter, Depends, HTTPException, Query, status
+
+# Internal imports
+from app.dependencies import get_current_user
+from app.utils.helpers import get_logger, log_operation, standardize_error_response
+from app.models.email_models import EmailResponse, EmailSchema, ReaderViewResponse
from app.models.user_models import UserSchema
-from app.routers.user_router import get_current_user
-from app.services.database.factories import get_email_repository, get_email_service
+from app.services.database.factories import get_email_service
from app.services.email_service import EmailService
-router = APIRouter()
-
-# Configure logging with format and level
-logging.basicConfig(
- level=logging.INFO,
- format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
- datefmt='%Y-%m-%d %H:%M:%S'
-)
-
-# Add specific configuration for pymongo's logger
-logging.getLogger('pymongo').setLevel(logging.WARNING)
+# -------------------------------------------------------------------------
+# Router Configuration
+# -------------------------------------------------------------------------
-# Create module-specific logger
-logger = logging.getLogger(__name__)
-
-@router.get("/search")
-async def search_emails_by_keyword(
- keyword: str,
- email_service: EmailService = Depends(get_email_service),
- user: UserSchema = Depends(get_current_user)
-):
- """
- Search emails using extracted summary keywords.
-
- Args:
- keyword: Keyword to search for
- email_service: Injected email service
- user: Current user (from token)
+router = APIRouter()
+logger = get_logger(__name__, 'router')
- Returns:
- List of matched emails based on summary keywords
- """
- logger.info(f"Search endpoint hit with keyword: {keyword}")
- return await email_service.search_emails_by_keyword(user.google_id, keyword)
+# -------------------------------------------------------------------------
+# Endpoints
+# -------------------------------------------------------------------------
@router.get(
"/",
@@ -111,8 +88,8 @@ async def retrieve_emails(
try:
# Log request parameters
- logger.debug(f"Email retrieval request with refresh={refresh}", extra={"params": debug_info["request_params"]})
- logger.debug(f"Google ID for email retrieval: {user.google_id}")
+ log_operation(logger, 'debug', f"Email retrieval request with refresh={refresh}", extra={"params": debug_info["request_params"]})
+ log_operation(logger, 'debug', f"Google ID for email retrieval: {user.google_id}")
emails, total, service_debug_info = await email_service.fetch_emails(
google_id=user.google_id,
@@ -128,7 +105,7 @@ async def retrieve_emails(
# Combine debug info
debug_info.update(service_debug_info)
- logger.info(f"Retrieved {len(emails)} emails out of {total} total")
+ log_operation(logger, 'info', f"Retrieved {len(emails)} emails out of {total} total")
return EmailResponse(
emails=emails,
@@ -138,9 +115,7 @@ async def retrieve_emails(
)
except Exception as e:
- error_msg = f"Failed to retrieve emails: {str(e)}"
- logger.exception(error_msg) # This logs the full stack trace
- raise HTTPException(status_code=500, detail=error_msg)
+ raise standardize_error_response(e, "retrieve emails")
@router.get(
"/{email_id}",
@@ -169,7 +144,11 @@ async def retrieve_email(
"""
email = await email_service.get_email(email_id, user.google_id)
if not email:
- raise HTTPException(status_code=404, detail="Email not found")
+ raise standardize_error_response(
+ Exception("Email not found"),
+ "get email",
+ email_id
+ )
return email
@router.put(
@@ -199,7 +178,11 @@ async def mark_email_as_read(
"""
updated_email = await email_service.mark_email_as_read(email_id, user.google_id)
if not updated_email:
- raise HTTPException(status_code=404, detail="Email not found")
+ raise standardize_error_response(
+ Exception("Email not found"),
+ "mark email as read",
+ email_id
+ )
return updated_email
@router.delete(
@@ -229,7 +212,11 @@ async def delete_email(
"""
success = await email_service.delete_email(email_id, user.google_id)
if not success:
- raise HTTPException(status_code=404, detail="Email not found")
+ raise standardize_error_response(
+ Exception("Email not found"),
+ "delete email",
+ email_id
+ )
return {"message": "Email deleted successfully"}
@router.get(
@@ -263,13 +250,15 @@ async def get_email_reader_view(
reader_content = await email_service.get_email_reader_view(email_id, user.google_id)
if not reader_content:
- raise HTTPException(status_code=404, detail="Email not found")
+ raise standardize_error_response(
+ Exception("Email not found"),
+ "get email reader view",
+ email_id
+ )
return reader_content
except Exception as e:
if isinstance(e, HTTPException):
raise e
- error_msg = f"Failed to generate reader view: {str(e)}"
- logger.exception(error_msg)
- raise HTTPException(status_code=500, detail=error_msg)
\ No newline at end of file
+ raise standardize_error_response(e, "generate reader view", email_id)
\ No newline at end of file
diff --git a/backend/app/routers/summaries_router.py b/backend/app/routers/summaries_router.py
index 7b4ac071..66ea7c67 100644
--- a/backend/app/routers/summaries_router.py
+++ b/backend/app/routers/summaries_router.py
@@ -6,41 +6,40 @@
strategies to provide concise representations of emails.
"""
+# Standard library imports
import logging
-from typing import List, Optional, Annotated
-from fastapi import APIRouter, HTTPException, Depends, Query, Path, status
-from contextlib import asynccontextmanager
+from typing import List
-from app.utils.config import Settings, get_settings, SummarizerProvider
+# Third-party imports
+from fastapi import APIRouter, Depends, HTTPException, Path, Query, status
+
+# Internal imports
+from app.dependencies import get_current_user
+from app.utils.helpers import get_logger, log_operation, standardize_error_response
from app.models import EmailSchema, SummarySchema, UserSchema
-from app.services import EmailService, SummaryService
-from app.services.summarization import get_summarizer
-from app.services.summarization.base import AdaptiveSummarizer
+from app.services import SummaryService
+from app.services.database.factories import get_email_service, get_summary_service
from app.services.summarization import (
- ProcessingStrategy,
- OpenAIEmailSummarizer,
- GeminiEmailSummarizer
-)
-from app.routers.user_router import get_current_user
-from app.services.database.factories import (
- get_summary_service,
- get_email_service
+ GeminiEmailSummarizer,
+ OpenAIEmailSummarizer,
+ ProcessingStrategy,
+ get_summarizer,
)
+from app.services.summarization.base import AdaptiveSummarizer
-# Configure logging with format and level
-logging.basicConfig(
- level=logging.INFO,
- format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
- datefmt='%Y-%m-%d %H:%M:%S'
-)
+# -------------------------------------------------------------------------
+# Router Configuration
+# -------------------------------------------------------------------------
+
+router = APIRouter()
# Add specific configuration for pymongo's logger
logging.getLogger('pymongo').setLevel(logging.WARNING)
+logger = get_logger(__name__, 'router')
-# Create module-specific logger
-logger = logging.getLogger(__name__)
-
-router = APIRouter()
+# -------------------------------------------------------------------------
+# Endpoints
+# -------------------------------------------------------------------------
@router.get(
"/batch",
@@ -87,15 +86,17 @@ async def get_summaries_by_ids(
if not result['summaries']:
if result['missing_emails'] and not result['failed_summaries']:
# Only missing emails, no generation failures
- raise HTTPException(
- status_code=status.HTTP_404_NOT_FOUND,
- detail=f"Emails not found: {result['missing_emails']}"
+ raise standardize_error_response(
+ Exception("Emails not found"),
+ "get summaries by ids",
+ result['missing_emails']
)
elif result['failed_summaries'] and not result['missing_emails']:
# Only generation failures, no missing emails
- raise HTTPException(
- status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
- detail=f"Failed to generate summaries for emails: {result['failed_summaries']}"
+ raise standardize_error_response(
+ Exception("Failed to generate summaries"),
+ "get summaries by ids",
+ result['failed_summaries']
)
else:
# Both missing emails and generation failures
@@ -103,14 +104,15 @@ async def get_summaries_by_ids(
"missing_emails": result['missing_emails'],
"failed_summaries": result['failed_summaries']
}
- raise HTTPException(
- status_code=status.HTTP_404_NOT_FOUND,
- detail=f"No summaries could be generated: {error_details}"
+ raise standardize_error_response(
+ Exception("No summaries could be generated"),
+ "get summaries by ids",
+ error_details
)
# If we have some successful summaries but also some failures, log a warning
if result['missing_emails'] or result['failed_summaries']:
- logger.warning(
+            log_operation(logger, 'warning',
f"Partial success for user {user.google_id}: "
f"{len(result['summaries'])} successful, "
f"{len(result['missing_emails'])} missing, "
@@ -123,11 +125,7 @@ async def get_summaries_by_ids(
# Re-raise HTTP exceptions as-is
raise
except Exception as e:
- logger.error(f"Error retrieving/generating summaries by IDs: {str(e)}", exc_info=True)
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Failed to retrieve email summaries: {str(e)}"
- )
+ raise standardize_error_response(e, "retrieve/generate summaries by IDs")
@router.get(
"/",
@@ -242,12 +240,7 @@ async def get_summaries(
)
except Exception as e:
- # Log the full error for debugging
- logger.error(f"Error processing summaries: {str(e)}", exc_info=True)
- raise HTTPException(
- status_code=500,
- detail="Failed to process email summaries"
- )
+ raise standardize_error_response(e, "process email summaries")
@router.get(
"/recent/{days}",
@@ -274,7 +267,7 @@ async def get_recent_summaries(
"""
try:
# Log request parameters
- logger.debug(f"Getting recent summaries for user {user.email} - days: {days}, limit: {limit}")
+ log_operation(logger, 'debug', f"Getting recent summaries for user {user.email} - days: {days}, limit: {limit}")
# Get summaries from service
summaries = await summary_service.get_recent_summaries(
@@ -283,22 +276,10 @@ async def get_recent_summaries(
google_id=user.google_id
)
- logger.debug(f"Retrieved {len(summaries)} summaries for user {user.email}")
+ log_operation(logger, 'debug', f"Retrieved {len(summaries)} summaries for user {user.email}")
return summaries
except Exception as e:
- logger.error(
- f"Error retrieving recent summaries for user {user.email}: {str(e)}",
- exc_info=True,
- extra={
- "user_email": user.email,
- "days": days,
- "limit": limit
- }
- )
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to retrieve recent summaries"
- )
+ raise standardize_error_response(e, "retrieve recent summaries")
@router.get(
"/keyword/{keyword}",
@@ -331,11 +312,7 @@ async def search_by_keyword(
results = await summary_service.search_by_keywords([keyword], limit=limit, google_id=user.google_id)
return results
except Exception as e:
- logging.error(f"Error searching summaries by keyword: {str(e)}", exc_info=True)
- raise HTTPException(
- status_code=500,
- detail="Failed to search summaries"
- )
+ raise standardize_error_response(e, "search summaries by keyword")
@router.get("/{email_id}", response_model=SummarySchema)
async def get_summary_by_id(
@@ -360,19 +337,16 @@ async def get_summary_by_id(
# Get summary from repository
summary = await summary_service.get_or_create_summary(email_id, summarizer, user.google_id)
if not summary:
- raise HTTPException(
- status_code=status.HTTP_404_NOT_FOUND,
- detail=f"Summary not found for email {email_id}"
+ raise standardize_error_response(
+ Exception("Summary not found"),
+ "get summary",
+ email_id
)
return SummarySchema(**summary)
except Exception as e:
- logger.error(f"Error retrieving/generating summary: {e}", exc_info=True)
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=str(e)
- )
+ raise standardize_error_response(e, "retrieve/generate summary", email_id)
@router.post(
"/summarize",
@@ -424,11 +398,7 @@ async def summarize_single_email(
return summary
except Exception as e:
- logging.error(f"Error summarizing email: {str(e)}", exc_info=True)
- raise HTTPException(
- status_code=500,
- detail="Failed to generate email summary"
- )
+ raise standardize_error_response(e, "generate email summary")
@router.delete(
"/{email_id}",
@@ -457,8 +427,9 @@ async def delete_summary(
"""
deleted = await summary_service.delete_summary(email_id, user.google_id)
if not deleted:
- raise HTTPException(
- status_code=404,
- detail=f"Summary for email {email_id} not found"
+ raise standardize_error_response(
+ Exception("Summary not found"),
+ "delete summary",
+ email_id
)
return {"message": f"Summary for email {email_id} deleted"}
diff --git a/backend/app/routers/user_router.py b/backend/app/routers/user_router.py
index 9363b8c3..2c9133ec 100644
--- a/backend/app/routers/user_router.py
+++ b/backend/app/routers/user_router.py
@@ -5,61 +5,29 @@
It provides endpoints for retrieving and updating user information and preferences.
"""
-from fastapi import APIRouter, HTTPException, status, Depends
-from fastapi.security import OAuth2PasswordBearer
-from fastapi.responses import JSONResponse
-from functools import lru_cache
-from typing import Optional, Dict, Any
+# Third-party imports
+from fastapi import APIRouter, Depends, HTTPException, status
-from google.auth.transport.requests import Request as GoogleRequest
-from google.oauth2.credentials import Credentials
-from googleapiclient.discovery import build
-from starlette.concurrency import run_in_threadpool
-
-from app.models import UserSchema, PreferencesSchema
+# Internal imports
+from app.dependencies import get_current_user, get_current_user_info
+from app.utils.helpers import get_logger, log_operation, standardize_error_response
+from app.models import PreferencesSchema, UserSchema
from app.services.auth_service import AuthService
from app.services.user_service import UserService
-from app.services.database.factories import get_user_service, get_auth_service
-
-router = APIRouter()
+from app.services.email_service import EmailService
+from app.services.summarization.summary_service import SummaryService
+from app.services.database.factories import get_auth_service, get_user_service, get_email_service, get_summary_service
-# OAuth authentication scheme
-oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/auth/token", description="Enter the token you received from the login flow (without Bearer prefix)")
+# -------------------------------------------------------------------------
+# Router Configuration
+# -------------------------------------------------------------------------
-# Debugging helper function
-def debug(message: str):
- """Print debug messages with a consistent format"""
- print(f"[DEBUG] {message}")
+router = APIRouter()
+logger = get_logger(__name__, 'router')
-async def get_current_user_info(
- token: str = Depends(oauth2_scheme),
- auth_service: AuthService = Depends(get_auth_service)
-):
- """
- Validates token and returns user information.
-
- Args:
- token: JWT token from OAuth2 authentication
- auth_service: Auth service instance
-
- Returns:
- dict: User information and credentials
-
- Raises:
- HTTPException: 401 error if token is invalid
- """
- debug(f"Validating token for user authentication...")
-
- try:
- user_data = await auth_service.get_credentials_from_token(token)
- debug(f"User authenticated successfully: {user_data.get('user_info', {}).get('email', 'Unknown')}")
- return user_data
- except Exception as e:
- debug(f"[ERROR] Authentication failed: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail=f"Invalid authentication: {str(e)}"
- )
+# -------------------------------------------------------------------------
+# Endpoints
+# -------------------------------------------------------------------------
@router.get(
"/me",
@@ -67,61 +35,20 @@ async def get_current_user_info(
summary="Get current user profile",
description="Retrieves the authenticated user's profile information or creates a new user record if one doesn't exist"
)
-async def get_current_user(
- user_data: dict = Depends(get_current_user_info),
- user_service: UserService = Depends(get_user_service)
+async def get_current_user_profile(
+ user: UserSchema = Depends(get_current_user)
):
"""
- Retrieve user details or create user if they don't exist.
+ Retrieve current user profile.
Args:
- user_data: User information and credentials from token validation
- user_service: User service instance
+ user: Current authenticated user from dependency
Returns:
UserSchema: User profile information
-
- Raises:
- HTTPException: If user retrieval fails
"""
- debug("Retrieving current user...")
-
- try:
- user_info = user_data['user_info']
- user_email = user_info.get('email')
- google_id = user_info.get('google_id')
-
- debug(f"Fetching user from database or creating new: {user_email}")
-
- # Try to get existing user
- user = await user_service.get_user_by_email(user_email)
-
- # If user doesn't exist, create new user
- if not user:
- debug(f"Creating new user: {user_email}")
- user = await user_service.create_user({
- "email": user_email,
- "name": user_info.get("name", ""),
- "picture": user_info.get("picture", ""),
- "google_id": google_id
- })
- else:
- debug(f"Found existing user: {user_email}")
- # Convert UserSchema to dict for checking google_id
- user_dict = user.dict()
- # Update google_id if it's missing
- if not user_dict.get('google_id'):
- debug(f"Updating missing google_id for user: {user_email}")
- await user_service.update_user(user_dict['_id'], {"google_id": google_id})
-
- debug(f"User retrieval successful: {user_email}")
- return user
- except Exception as e:
- debug(f"[ERROR] Failed to retrieve user: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Failed to retrieve user: {str(e)}"
- )
+ logger.debug(f"User profile retrieved: {user.email}")
+ return user
@router.get(
"/preferences",
@@ -145,34 +72,28 @@ async def get_user_preferences(
Raises:
HTTPException: If preferences cannot be retrieved
"""
- debug("Retrieving user preferences...")
+ logger.debug("Retrieving user preferences...")
try:
user_info = user_data['user_info']
user_email = user_info.get('email')
- debug(f"Fetching preferences for user: {user_email}")
+ logger.debug(f"Fetching preferences for user: {user_email}")
# Get user first to ensure they exist
user = await user_service.get_user_by_email(user_email)
if not user:
- debug(f"User not found: {user_email}")
+ logger.debug(f"User not found: {user_email}")
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="User not found"
)
- preferences = user.preferences.dict()
- debug(f"Preferences retrieved successfully for user: {user_email}")
+ preferences = user.preferences.model_dump()
+ logger.debug(f"Preferences retrieved successfully for user: {user_email}")
return {"preferences": preferences}
- except HTTPException:
- raise
except Exception as e:
- debug(f"[ERROR] Failed to retrieve user preferences: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Failed to retrieve user preferences: {str(e)}"
- )
+ raise standardize_error_response(e, "retrieve user preferences")
@router.put(
"/preferences",
@@ -198,36 +119,36 @@ async def update_preferences(
Raises:
HTTPException: If preference update fails
"""
- debug("Updating user preferences...")
+ logger.debug("Updating user preferences...")
try:
user_info = user_data['user_info']
user_email = user_info.get('email')
- debug(f"Updating preferences for user: {user_email}")
+ logger.debug(f"Updating preferences for user: {user_email}")
# Get user first to ensure they exist
user = await user_service.get_user_by_email(user_email)
if not user:
- debug(f"User not found: {user_email}")
+ logger.debug(f"User not found: {user_email}")
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="User not found"
)
- debug(f"Found user with ID: {user.google_id}")
- debug(f"Current user data: {user.dict()}")
+ logger.debug(f"Found user with ID: {user.google_id}")
+ logger.debug(f"Current user data: {user.model_dump()}")
# Create update data with existing user fields and new preferences
update_data = {
"google_id": user.google_id,
"email": user.email,
"name": user.name,
- "oauth": user.oauth.dict() if hasattr(user, 'oauth') else {},
- "preferences": preferences.dict()
+ "oauth": user.oauth.model_dump() if hasattr(user, 'oauth') and user.oauth else {},
+ "preferences": preferences.model_dump()
}
- debug(f"Update data: {update_data}")
+ logger.debug(f"Update data: {update_data}")
# Update user with new preferences
try:
@@ -236,30 +157,20 @@ async def update_preferences(
update_data
)
except Exception as e:
- debug(f"Error updating user: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Failed to update user: {str(e)}"
- )
+ raise standardize_error_response(e, "update preferences")
if not updated_user:
- debug("Update returned None")
+ logger.debug("Update returned None")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to update preferences"
)
- debug(f"Updated user data: {updated_user.dict()}")
- debug(f"Preferences updated successfully for user: {user_email}")
- return {"preferences": updated_user.preferences.dict()}
- except HTTPException:
- raise
+ logger.debug(f"Updated user data: {updated_user.model_dump()}")
+ logger.debug(f"Preferences updated successfully for user: {user_email}")
+ return {"preferences": updated_user.preferences.model_dump()}
except Exception as e:
- debug(f"[ERROR] Failed to update preferences: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Failed to update preferences: {str(e)}"
- )
+ raise standardize_error_response(e, "update preferences")
@router.get("/{user_id}", response_model=UserSchema)
async def get_user(
@@ -283,9 +194,10 @@ async def get_user(
"""
user = await user_service.get_user(user_id)
if not user:
- raise HTTPException(
- status_code=status.HTTP_404_NOT_FOUND,
- detail="User not found"
+ raise standardize_error_response(
+ Exception("User not found"),
+ "get user",
+ user_id
)
return user
@@ -309,9 +221,10 @@ async def get_user_by_email(
"""
user = await user_service.get_user_by_email(email)
if not user:
- raise HTTPException(
- status_code=status.HTTP_404_NOT_FOUND,
- detail="User not found"
+ raise standardize_error_response(
+ Exception("User not found"),
+ "get user by email",
+ email
)
return user
@@ -356,9 +269,10 @@ async def update_user(
"""
user = await user_service.update_user(user_id, user_data)
if not user:
- raise HTTPException(
- status_code=status.HTTP_404_NOT_FOUND,
- detail="User not found"
+ raise standardize_error_response(
+ Exception("User not found"),
+ "update user",
+ user_id
)
return user
@@ -366,7 +280,9 @@ async def update_user(
async def delete_user(
user_id: str,
user_service: UserService = Depends(get_user_service),
- auth_service: AuthService = Depends(get_auth_service)
+ auth_service: AuthService = Depends(get_auth_service),
+ email_service: EmailService = Depends(get_email_service),
+ summary_service: SummaryService = Depends(get_summary_service)
) -> dict:
"""
Delete a user.
@@ -375,6 +291,8 @@ async def delete_user(
user_id: The ID of the user to delete
user_service: Injected UserService instance
auth_service: Injected AuthService instance
+ email_service: Injected EmailService instance
+ summary_service: Injected SummaryService instance
Returns:
dict: Success message
@@ -382,11 +300,31 @@ async def delete_user(
Raises:
HTTPException: 404 if user not found
"""
- success = await user_service.delete_user(user_id)
- if not success:
- raise HTTPException(
- status_code=status.HTTP_404_NOT_FOUND,
- detail="User not found"
+ successDeleteUser = await user_service.delete_user(user_id)
+ if not successDeleteUser:
+ raise standardize_error_response(
+ HTTPException(status_code=404, detail="User not found"),
+ action="delete user",
+ context=user_id
)
+
+ successDeleteEmails = await email_service.delete_emails(user_id)
+ if not successDeleteEmails:
+ raise standardize_error_response(
+ HTTPException(status_code=500, detail="Failed to delete user emails"),
+ action="delete emails by user ID",
+ context=user_id
+ )
+
+    # Note: This is commented out to avoid having to incur the cost of deleting and re-summarizing
+
+ #successDeleteSummaries = await summary_service.delete_summaries_by_google_id(user_id)
+ #if not successDeleteSummaries:
+ # raise standardize_error_response(
+ # HTTPException(status_code=404, detail="Summaries not found"),
+ # action="delete summaries by user ID",
+ # context=user_id
+ #)
+
return {"message": "User deleted successfully"}
\ No newline at end of file
diff --git a/backend/app/services/auth_service.py b/backend/app/services/auth_service.py
index 622da344..b7fe871d 100644
--- a/backend/app/services/auth_service.py
+++ b/backend/app/services/auth_service.py
@@ -5,32 +5,36 @@
and user authentication with Google.
"""
-import logging
+# Standard library imports
import os
-from typing import Optional, Dict, Any
from datetime import datetime, timedelta
+from typing import Any, Dict, Optional
+
+# Third-party imports
from fastapi import HTTPException, status
+from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import Flow
-from google.auth.transport.requests import Request
from googleapiclient.discovery import build
from starlette.concurrency import run_in_threadpool
-# Import from app modules
-from app.models import TokenData, AuthState
-from app.services.database import TokenRepository, UserRepository, get_token_repository, get_user_repository
+# Internal imports
+from app.utils.helpers import get_logger, log_operation, standardize_error_response
+from app.models import AuthState, TokenData, UserSchema
+from app.services.database import (
+ TokenRepository,
+ UserRepository,
+ get_token_repository,
+ get_user_repository,
+)
from app.services.user_service import UserService
from app.utils.config import Settings, get_settings
-# Configure logging
-logging.basicConfig(
- level=logging.DEBUG,
- format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
- datefmt='%Y-%m-%d %H:%M:%S'
-)
-
-logger = logging.getLogger(__name__)
+# -------------------------------------------------------------------------
+# Configuration
+# -------------------------------------------------------------------------
+logger = get_logger(__name__, 'service')
settings = get_settings()
SCOPES = [
@@ -86,56 +90,55 @@ async def verify_user_access(
Raises:
HTTPException: 403 if access is denied
"""
- logger.debug(f"Verifying user access for user ID: {user_id}")
+ log_operation(logger, 'debug', f"Verifying user access for user ID: {user_id}")
try:
# Get the current user's email from the token data
token_record = await self.get_token_record(current_user_data['google_id'])
if not token_record:
- logger.warning(f"No token record found for user: {current_user_data['google_id']}")
- raise HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail="No valid token record found"
+ log_operation(logger, 'warning', f"No token record found for user: {current_user_data['google_id']}")
+ raise standardize_error_response(
+ Exception("No valid token record found"),
+ "verify user access",
+ current_user_data['google_id']
)
current_user = await user_service.get_user(current_user_data['user_info']['id'])
if not current_user:
- raise HTTPException(
- status_code=status.HTTP_404_NOT_FOUND,
- detail="Current user not found"
+ raise standardize_error_response(
+ Exception("Current user not found"),
+ "verify user access",
+ current_user_data['google_id']
)
# Allow access if:
# 1. User is accessing their own data
# 2. User has admin privileges
if current_user['google_id'] == user_id or current_user.get('is_admin', False):
- logger.debug(f"Access granted for user ID: {user_id}")
+ log_operation(logger, 'debug', f"Access granted for user ID: {user_id}")
return True
- logger.debug(f"Access denied for user ID: {user_id}")
- raise HTTPException(
- status_code=status.HTTP_403_FORBIDDEN,
- detail="You do not have permission to access this resource"
+ log_operation(logger, 'debug', f"Access denied for user ID: {user_id}")
+ raise standardize_error_response(
+ Exception("You do not have permission to access this resource"),
+ "verify user access",
+ user_id
)
- except HTTPException:
- raise
except Exception as e:
- logger.error(f"Access verification failed: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Failed to verify access: {str(e)}"
- )
+ raise standardize_error_response(e, "verify user access", user_id)
def create_authorization_url(self, custom_state=None) -> Dict[str, str]:
"""Generates Google OAuth2 authorization URL."""
- logger.debug("Generating Google OAuth2 authorization URL...")
+ log_operation(logger, 'debug', "Generating Google OAuth2 authorization URL...")
client_id = settings.google_client_id
client_secret = settings.google_client_secret
if not client_id or not client_secret:
- logger.error("Google API credentials missing.")
- raise HTTPException(status_code=500, detail="Google API credentials not found in settings.")
+ raise standardize_error_response(
+ Exception("Google API credentials missing"),
+ "create authorization URL"
+ )
client_config = {
"web": {
@@ -150,7 +153,7 @@ def create_authorization_url(self, custom_state=None) -> Dict[str, str]:
flow = Flow.from_client_config(client_config, SCOPES)
flow.redirect_uri = self.get_redirect_uri()
- logger.debug(f"Using redirect URI: {flow.redirect_uri}")
+ log_operation(logger, 'debug', f"Using redirect URI: {flow.redirect_uri}")
if custom_state:
authorization_url, _ = flow.authorization_url(
@@ -236,11 +239,7 @@ async def get_tokens_from_code(self, code: str, email: str) -> TokenData:
return token
except Exception as e:
- logger.error(f"Failed to get tokens for user {email}: {e}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to get tokens"
- )
+ raise standardize_error_response(e, "get tokens from code", email)
async def get_current_user(self, email: str) -> Optional[Dict[str, Any]]:
"""
@@ -264,11 +263,7 @@ async def get_current_user(self, email: str) -> Optional[Dict[str, Any]]:
))
return user.model_dump()
except Exception as e:
- logger.error(f"Failed to get current user: {e}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to get current user"
- )
+ raise standardize_error_response(e, "get current user", email)
async def get_token_data(self, google_id: str) -> Optional[TokenData]:
"""
@@ -283,18 +278,14 @@ async def get_token_data(self, google_id: str) -> Optional[TokenData]:
try:
return await self.token_repository.find_by_google_id(google_id)
except Exception as e:
- logger.error(f"Failed to get token record for google_id {google_id}: {e}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to get token record"
- )
+ raise standardize_error_response(e, "get token data", google_id)
def get_redirect_uri(self):
"""Returns the OAuth redirect URI."""
- logger.debug("Retrieving redirect URI...")
+ log_operation(logger, 'debug', "Retrieving redirect URI...")
if callback_url := settings.oauth_callback_url:
- logger.debug(f"Using env-specified callback URL: {callback_url}")
+ log_operation(logger, 'debug', f"Using env-specified callback URL: {callback_url}")
return callback_url
environment = settings.environment
@@ -311,7 +302,7 @@ async def get_credentials_from_token(self, token: str):
Validates a token and returns user information from Google.
Used for authenticating API requests.
"""
- logger.debug("Validating access token and retrieving user info...")
+ log_operation(logger, 'debug', "Validating access token and retrieving user info...")
try:
# First try to validate the token directly
@@ -331,13 +322,13 @@ async def get_credentials_from_token(self, token: str):
service.userinfo().get().execute()
)
except Exception as e:
- logger.debug(f"Initial token validation failed, attempting refresh: {e}")
+ log_operation(logger, 'debug', f"Initial token validation failed, attempting refresh: {e}")
# If token validation fails, try to get a new token using refresh token
token_record = await self.token_repository.find_by_token(token)
if not token_record or not token_record.refresh_token:
- raise HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail="Invalid or expired token"
+ raise standardize_error_response(
+ Exception("Invalid or expired token"),
+ "get credentials from token"
)
# Create credentials with refresh token
@@ -369,13 +360,13 @@ async def get_credentials_from_token(self, token: str):
)
if not user_info or not user_info.get('email'):
- logger.error("Unable to retrieve user email from token.")
+ log_operation(logger, 'error', "Unable to retrieve user email from token.")
raise ValueError("Unable to retrieve user email from token")
# Add google_id to user_info
user_info['google_id'] = user_info.get('id')
- logger.info(f"User info retrieved for: {user_info.get('email')}")
+ log_operation(logger, 'info', f"User info retrieved for: {user_info.get('email')}")
return {
'user_info': user_info,
@@ -384,8 +375,7 @@ async def get_credentials_from_token(self, token: str):
}
except Exception as e:
- logger.exception("Token validation failed.")
- raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=f"Invalid token: {str(e)}")
+ raise standardize_error_response(e, "get credentials from token")
async def get_token_record(self, google_id: str) -> Optional[Dict[str, Any]]:
"""
@@ -398,16 +388,15 @@ async def get_token_record(self, google_id: str) -> Optional[Dict[str, Any]]:
Optional[Dict[str, Any]]: Complete token record if found, None otherwise
"""
try:
- logger.debug(f"Getting token record for google_id: {google_id}")
+ log_operation(logger, 'debug', f"Getting token record for google_id: {google_id}")
token_data = await self.token_repository.find_by_google_id(google_id)
if not token_data:
- logger.warning(f"No token record found for google_id: {google_id}")
+ log_operation(logger, 'warning', f"No token record found for google_id: {google_id}")
return None
- logger.info(f"Found token record for google_id: {google_id}")
+ log_operation(logger, 'info', f"Found token record for google_id: {google_id}")
# Convert TokenData to dict if it's a model instance
if hasattr(token_data, 'model_dump'):
return token_data.model_dump()
return token_data
except Exception as e:
- logger.error(f"Failed to get token record for google_id {google_id}: {e}")
- return None
+ raise standardize_error_response(e, "get token record", google_id)
diff --git a/backend/app/services/database/connection.py b/backend/app/services/database/connection.py
index 6b3284bd..7d65a16d 100644
--- a/backend/app/services/database/connection.py
+++ b/backend/app/services/database/connection.py
@@ -33,15 +33,17 @@ async def initialize(self):
if self._client is None:
try:
settings = get_settings()
-
- # Configure connection options
+
+ # Define valid connection options
connection_options = {
- "serverSelectionTimeoutMS": 5000, # 5 second timeout
- "connectTimeoutMS": 10000, # 10 second connection timeout
- "retryWrites": True, # Enable retryable writes
- "retryReads": True, # Enable retryable reads
+ #"serverSelectionTimeoutMS": 5000, # 5 second timeout
+ #"connectTimeoutMS": 10000, # 10 second connection timeout
+ "retryWrites": True, # Enable retryable writes on bad connections
+ "retryReads": True, # Enable retryable reads on bad connections
+ "maxPoolSize": 100, # Maximum number of connections in the pool
+ "minPoolSize": 2, # Minimum number of connections in the pool
}
-
+
self._client = AsyncIOMotorClient(
settings.mongo_uri,
**connection_options
diff --git a/backend/app/services/database/repositories/base_repository.py b/backend/app/services/database/repositories/base_repository.py
index a9a6a867..bb1fd2e9 100644
--- a/backend/app/services/database/repositories/base_repository.py
+++ b/backend/app/services/database/repositories/base_repository.py
@@ -131,8 +131,7 @@ async def find_many(
query: Dict[str, Any],
limit: int = 100,
skip: int = 0,
- sort: List[tuple] = None,
- projection: Optional[Dict[str, int]] = None
+ sort: List[tuple] = None
) -> List[T]:
"""
Find multiple documents matching the query with pagination support.
@@ -142,13 +141,12 @@ async def find_many(
limit: Maximum number of documents to return
skip: Number of documents to skip
sort: List of (field, direction) tuples for sorting
- projection: Dictionary specifying fields to include/exclude (e.g., {"email_id": 1})
Returns:
List[T]: List of matching documents
"""
try:
- cursor = self._get_collection().find(query, projection) if projection else self._get_collection().find(query)
+ cursor = self._get_collection().find(query)
if sort:
cursor = cursor.sort(sort)
@@ -272,6 +270,22 @@ async def delete_one(self, query: Dict[str, Any]) -> bool:
return result.deleted_count > 0
except Exception as e:
raise
+
+ async def delete_many(self, query: Dict[str, Any]) -> bool:
+ """
+ Delete multiple documents matching the query.
+
+ Args:
+ query: MongoDB query filter
+
+ Returns:
+ bool: True if deletion successful
+ """
+ try:
+ result = await self._get_collection().delete_many(query)
+ return result.deleted_count > 0
+ except Exception as e:
+ raise
async def count_documents(self, query: Dict[str, Any]) -> int:
"""
diff --git a/backend/app/services/database/repositories/email_repository.py b/backend/app/services/database/repositories/email_repository.py
index 0bec7afa..1275dee7 100644
--- a/backend/app/services/database/repositories/email_repository.py
+++ b/backend/app/services/database/repositories/email_repository.py
@@ -82,6 +82,18 @@ async def update_by_email_and_google_id(
update_data
)
+ async def delete_by_google_id(self, google_id: str) -> bool:
+ """
+ Delete all emails by Google user ID.
+
+ Args:
+ google_id: Google ID of the user
+
+ Returns:
+ bool: True if deletion successful
+ """
+ return await self.delete_many({"google_id": google_id})
+
async def delete_by_email_and_google_id(self, email_id: str, google_id: str) -> bool:
"""
Delete an email by IMAP UID and Google user ID.
diff --git a/backend/app/services/database/repositories/summary_repository.py b/backend/app/services/database/repositories/summary_repository.py
index c3597497..cfe5d05f 100644
--- a/backend/app/services/database/repositories/summary_repository.py
+++ b/backend/app/services/database/repositories/summary_repository.py
@@ -59,28 +59,6 @@ async def find_by_google_id(self, google_id: str) -> List[SummarySchema]:
"""
return await self.find_many({"google_id": google_id})
- async def find_email_ids_by_keyword(self, google_id: str, keyword: str) -> List[str]:
- """
- Search for emails using summary keywords.
-
- Args:
- google_id: Google ID of the user.
- keyword: Keyword to search in the summary keywords.
- limit: Maximum number of emails to return.
-
- Returns:
- List[EmailSchema]: List of emails whose summaries match the keyword.
- """
-
- query = {
- "google_id": google_id,
- "keywords": {"$regex": keyword, "$options": "i"}
- }
- raw_results = await self._get_collection().find(query, {"email_id": 1}).to_list(length=100)
-
- return [doc["email_id"] for doc in raw_results if "email_id" in doc]
-
-
async def update_by_email_id(
self,
email_id: str,
@@ -116,14 +94,26 @@ async def delete_by_email_and_google_id(self, email_id: str, google_id: str) ->
bool: True if deletion successful
"""
return await self.delete_one({"email_id": email_id, "google_id": google_id})
+
+ async def delete_by_google_id(self, google_id):
+ """
+ Delete all summaries attached to given Google user ID.
+
+ Args:
+ google_id: Google ID of the user
+
+ Returns:
+ bool: True if deletion successful
+ """
+ return await self.delete_many({"google_id": google_id})
+
async def find_many(
self,
query: Dict[str, Any],
limit: int = 100,
skip: int = 0,
- sort: List[tuple] = None,
- projection: Optional[Dict[str, int]] = None
+ sort: List[tuple] = None
) -> List[SummarySchema]:
"""
Find multiple summaries matching the query.
@@ -144,4 +134,4 @@ async def find_many(
if isinstance(value, datetime):
query["generated_at"][op] = value
- return await super().find_many(query, limit, skip, sort, projection=projection)
\ No newline at end of file
+ return await super().find_many(query, limit, skip, sort)
\ No newline at end of file
diff --git a/backend/app/services/email_service.py b/backend/app/services/email_service.py
index f46d994a..8d14d62e 100644
--- a/backend/app/services/email_service.py
+++ b/backend/app/services/email_service.py
@@ -2,32 +2,34 @@
Email service for handling email-related operations.
"""
-import logging
-import os
+# Standard library imports
import email
-from typing import List, Optional, Dict, Any, Tuple, Union
+import os
import re
-from email.header import decode_header
-from imapclient import IMAPClient
from datetime import datetime
-from google.auth.transport.requests import Request
+from email.header import decode_header
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+# Third-party imports
from fastapi import HTTPException, status
+from google.auth.transport.requests import Request
+from imapclient import IMAPClient
from starlette.concurrency import run_in_threadpool
-# Import from app modules
+# Internal imports
+from app.utils.helpers import get_logger, log_operation, standardize_error_response
from app.models import EmailSchema, ReaderViewResponse
-from app.services.database import EmailRepository, SummaryRepository, get_email_repository, get_summary_repository
-from app.services.database.factories import get_user_service, get_auth_service
from app.services import auth_service
+from app.services.database import EmailRepository, get_email_repository
+from app.services.database.factories import get_auth_service, get_user_service
+from app.utils.config import get_settings
-# Configure logging
-logging.basicConfig(
- level=logging.DEBUG,
- format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
- datefmt='%Y-%m-%d %H:%M:%S'
-)
+# -------------------------------------------------------------------------
+# Configuration
+# -------------------------------------------------------------------------
-logger = logging.getLogger(__name__)
+logger = get_logger(__name__, 'service')
+settings = get_settings()
class EmailService:
"""
@@ -37,16 +39,14 @@ class EmailService:
processing, and storage operations.
"""
- def __init__(self, email_repository: EmailRepository = None, summary_repository: SummaryRepository = None):
+ def __init__(self, email_repository: EmailRepository = None):
"""
Initialize the email service.
Args:
email_repository: Email repository instance
- summary_repository: Summary repository instance
"""
self.email_repository = email_repository or get_email_repository()
- self.summary_repository = summary_repository or get_summary_repository()
self.imap_host = 'imap.gmail.com'
self.default_email_account = os.environ.get("EMAIL_ACCOUNT")
@@ -62,16 +62,7 @@ def _ensure_email_schema(self, email_data: Union[dict, EmailSchema]) -> EmailSch
def _handle_email_error(self, error: Exception, operation: str, email_id: str = None, google_id: str = None) -> None:
"""Standardize error handling for email operations."""
- error_msg = f"Failed to {operation}"
- if email_id:
- error_msg += f" email {email_id}"
- if google_id:
- error_msg += f" for user {google_id}"
- logger.exception(f"{error_msg}: {str(error)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=error_msg
- )
+ raise standardize_error_response(error, operation, email_id, google_id)
def _get_imap_connection(self, token: str, email_account: str) -> IMAPClient:
"""Create and authenticate IMAP connection."""
@@ -80,15 +71,7 @@ def _get_imap_connection(self, token: str, email_account: str) -> IMAPClient:
server.oauth2_login(email_account, token)
return server
except Exception as e:
- logger.error(f"IMAP Authentication Error: {e}")
- if hasattr(e, 'args') and e.args:
- logger.error(f"Additional error info: {e.args}")
- raise
-
- def _log_operation(self, level: str, message: str, **kwargs) -> None:
- """Standardize logging across the service."""
- log_method = getattr(logger, level.lower())
- log_method(message, **kwargs)
+ raise standardize_error_response(e, "get imap connection", email_account)
def _build_search_query(self, search: str) -> Dict[str, Any]:
"""Build search query component."""
@@ -123,16 +106,13 @@ async def get_auth_token(self) -> str:
if credentials.expired and credentials.refresh_token:
await run_in_threadpool(lambda: credentials.refresh(Request()))
else:
- raise HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail="Token expired and cannot be refreshed. User needs to re-authenticate."
+ raise standardize_error_response(
+ Exception("Token expired and cannot be refreshed. User needs to re-authenticate."),
+ "get auth token"
)
return credentials.token
except Exception as e:
- raise HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail=f"Token retrieval failed: {str(e)}"
- )
+ raise standardize_error_response(e, "get auth token")
# -------------------------------------------------------------------------
# Email Parsing Methods
@@ -274,7 +254,7 @@ def _extract_email_body(self, email_message: email.message.Message) -> Tuple[str
body = html_part.get_payload(decode=True).decode(errors="replace")
is_html = True
except Exception as e:
- self._log_operation('error', f"Error decoding HTML part: {e}")
+ log_operation(logger, 'error', f"Error decoding HTML part: {e}")
# Fall back to text part if available
if text_part:
body = text_part.get_payload(decode=True).decode(errors="replace")
@@ -288,14 +268,14 @@ def _extract_email_body(self, email_message: email.message.Message) -> Tuple[str
body = email_message.get_payload(decode=True).decode(errors="replace")
is_html = content_type == "text/html"
except Exception as e:
- self._log_operation('error', f"Error decoding non-multipart message: {e}")
+ log_operation(logger, 'error', f"Error decoding non-multipart message: {e}")
body = email_message.get_payload(decode=False)
# Try to detect HTML if content-type wasn't reliable
is_html = bool(re.search(r'<(?:html|body|div|p|h[1-6])[^>]*>', body, re.IGNORECASE))
# Validate HTML detection with regex if needed
if is_html and not bool(re.search(r'<(?:html|body|div|p|h[1-6])[^>]*>', body, re.IGNORECASE)):
- self._log_operation('warning', "Content marked as HTML but no HTML tags found, validating...")
+ log_operation(logger, 'warning', "Content marked as HTML but no HTML tags found, validating...")
is_html = False # Reset if no HTML tags found
# Apply minimal sanitization for HTML content
@@ -402,7 +382,7 @@ def _fetch_from_imap_sync(self, token: str, email_account: str,
emails.append(email_data)
except Exception as e:
- self._log_operation('error', f"Error processing email {uid}: {e}")
+ log_operation(logger, 'error', f"Error processing email {uid}: {e}")
continue
return emails
@@ -419,7 +399,7 @@ async def save_email_to_db(self, email_data: dict) -> None:
if not existing_email:
email_schema = self._ensure_email_schema(email_data)
await self.email_repository.insert_one(email_schema)
- self._log_operation('info', f"Email {email_id} inserted successfully")
+ log_operation(logger, 'info', f"Email {email_id} inserted successfully")
except Exception as e:
self._handle_email_error(e, "save", email_data.get("email_id"), email_data.get("google_id"))
@@ -468,7 +448,7 @@ async def mark_email_as_read(self, email_id: str, google_id: str) -> Optional[Em
email_id = str(email_id)
email_data = await self.email_repository.find_by_email_and_google_id(email_id, google_id)
if not email_data or email_data["google_id"] != google_id:
- self._log_operation('warning', f"Email {email_id} not found for user {google_id}")
+ log_operation(logger, 'warning', f"Email {email_id} not found for user {google_id}")
return None
email_data["is_read"] = True
@@ -493,46 +473,24 @@ async def delete_email(self, email_id: str, google_id: str) -> bool:
return await self.email_repository.delete_by_id(str(email_id), google_id)
except Exception as e:
self._handle_email_error(e, "delete", email_id, google_id)
-
- async def search_emails_by_keyword(self, google_id: str, keyword: str, limit: int = 50) -> List[EmailSchema]:
+
+ async def delete_emails(self, google_id: str) -> bool:
"""
- Search for emails using summary keywords.
-
+ Deletes all emails attached to given Google ID.
+
Args:
- google_id: Google ID of the user.
- keyword: Keyword to search in the summary keywords.
- limit: Maximum number of emails to return.
-
+ google_id: Google ID of the user
+
Returns:
- List[EmailSchema]: List of emails whose summaries match the keyword and then enriched with corresponding summary.
+ bool: True if deletion successful
"""
- logger.info(f"[Keyword Search] google_id={google_id}, keyword='{keyword}'")
-
try:
- #Find all email_ids from summaries that match the keyword for the given user
- email_ids = await self.summary_repository.find_email_ids_by_keyword(google_id, keyword)
- if not email_ids:
- return []
-
- #Query emails from the email repository using those email_ids
- query = {"google_id": google_id, "email_id": {"$in": [str(eid) for eid in email_ids]}}
- emails = await self.email_repository.find_many(query, limit=limit)
-
- # Retrieve matching summary records to extract summary_text
- summaries = await self.summary_repository.find_many(query)
- summary_map = {s.email_id: getattr(s, "summary_text", "") for s in summaries}
-
- # Enrich email records with their corresponding summaries
- enriched = []
- for e in emails:
- base = e if isinstance(e, dict) else e.model_dump()
- base["summary_text"] = summary_map.get(base["email_id"], "")
- enriched.append(base)
-
- return enriched
-
+ return await self.email_repository.delete_by_google_id(google_id)
except Exception as e:
- self._handle_email_error(e, "search by keyword", None, google_id)
+ self._handle_email_error(e, "delete", google_id=google_id)
+
+
+
# -------------------------------------------------------------------------
# Content Processing Methods
# -------------------------------------------------------------------------
@@ -679,7 +637,7 @@ async def fetch_emails(self, google_id: str, skip: int = 0, limit: int = 20,
)
debug_info["timing"]["main_query"] = (datetime.now() - start_time).total_seconds()
- self._log_operation('info', f"Retrieved {len(emails)} emails out of {total} total for user {google_id}")
+ log_operation(logger, 'info', f"Retrieved {len(emails)} emails out of {total} total for user {google_id}")
return emails, total, debug_info
except Exception as e:
@@ -699,26 +657,26 @@ async def _refresh_emails_from_imap(self, google_id: str, debug_info: Dict[str,
# Get user by google_id
user = await user_service.get_user(google_id)
if not user:
- self._log_operation('error', f"User {google_id} not found in database during IMAP refresh")
+ log_operation(logger, 'error', f"User {google_id} not found in database during IMAP refresh")
debug_info["imap_error"] = f"User {google_id} not found"
return
user_email = user.email
if not user_email:
- self._log_operation('error', f"Email address not found for user {google_id}")
+ log_operation(logger, 'error', f"Email address not found for user {google_id}")
debug_info["imap_error"] = "User email not found"
return
- self._log_operation('info', f"Fetching emails for {user_email}")
+ log_operation(logger, 'info', f"Fetching emails for {user_email}")
# Get token using google_id
token_data = await auth_service.get_token_data(google_id)
if not token_data:
- self._log_operation('error', f"No token found for user {google_id}")
+ log_operation(logger, 'error', f"No token found for user {google_id}")
debug_info["imap_error"] = "No token found for user"
return
- self._log_operation('info', f"Fetching emails from IMAP for {user_email}")
+ log_operation(logger, 'info', f"Fetching emails from IMAP for {user_email}")
imap_emails = await self.fetch_from_imap(
token=token_data.token,
email_account=user_email,
@@ -726,18 +684,18 @@ async def _refresh_emails_from_imap(self, google_id: str, debug_info: Dict[str,
limit=50
)
- self._log_operation('info', f"Retrieved {len(imap_emails)} emails from IMAP for {user_email}")
+ log_operation(logger, 'info', f"Retrieved {len(imap_emails)} emails from IMAP for {user_email}")
for email_data in imap_emails:
email_data["google_id"] = google_id
await self.save_email_to_db(email_data)
debug_info["imap_fetch_count"] = len(imap_emails)
- self._log_operation('info', f"Saved {len(imap_emails)} emails to database for {user_email}")
+ log_operation(logger, 'info', f"Saved {len(imap_emails)} emails to database for {user_email}")
except Exception as e:
- self._log_operation('exception', f"IMAP fetch failed for user {google_id}: {str(e)}")
debug_info["imap_error"] = str(e)
+ raise standardize_error_response(e, "refresh emails from imap", google_id=google_id)
finally:
debug_info["timing"]["imap_fetch_duration"] = (datetime.now() - start_time).total_seconds()
diff --git a/backend/app/services/summarization/__init__.py b/backend/app/services/summarization/__init__.py
index 3b67676c..2ac6acbf 100644
--- a/backend/app/services/summarization/__init__.py
+++ b/backend/app/services/summarization/__init__.py
@@ -5,14 +5,19 @@
and strategies.
"""
+# Standard library imports
from typing import TypeVar, Generic
+
+# Third-party imports
from fastapi import Depends, HTTPException
-from app.utils.config import Settings, get_settings, SummarizerProvider
+# Internal imports
from app.models import EmailSchema
+from app.utils.config import Settings, get_settings, SummarizerProvider
from .base import AdaptiveSummarizer
from .providers.openai.openai import OpenAIEmailSummarizer
from .providers.google.google import GeminiEmailSummarizer
+from .providers.openrouter.openrouter import OpenRouterEmailSummarizer
from .types import ProcessingStrategy
from .summary_service import SummaryService
@@ -23,6 +28,7 @@
'ProcessingStrategy',
'OpenAIEmailSummarizer',
'GeminiEmailSummarizer',
+ 'OpenRouterEmailSummarizer',
'get_summarizer'
]
@@ -66,6 +72,17 @@ async def get_summarizer(
model=settings.summarizer_model,
batch_threshold=settings.summarizer_batch_threshold
)
+ case SummarizerProvider.OPENROUTER:
+ if not settings.openrouter_api_key:
+ raise HTTPException(
+ status_code=500,
+ detail="OpenRouter API key not configured"
+ )
+ return OpenRouterEmailSummarizer(
+ api_key=settings.openrouter_api_key,
+ prompt_version=settings.summarizer_prompt_version,
+ batch_threshold=settings.summarizer_batch_threshold
+ )
case _:
raise HTTPException(
status_code=500,
diff --git a/backend/app/services/summarization/base.py b/backend/app/services/summarization/base.py
index cba3272d..2cbd96ad 100644
--- a/backend/app/services/summarization/base.py
+++ b/backend/app/services/summarization/base.py
@@ -1,9 +1,11 @@
+# Standard library imports
from abc import ABC, abstractmethod
from typing import Generic, List, Optional, TypeVar
from datetime import datetime, timezone
import asyncio
-import logging
+# Internal imports
+from app.utils.helpers import get_logger
from app.models import SummarySchema, EmailSchema
from app.services.summarization.types import(
ModelBackend,
@@ -43,7 +45,7 @@ def __init__(
self.timeout = timeout
self.model_config = model_config or {}
self._metrics: List[SummaryMetrics] = []
- self._logger = logging.getLogger(self.__class__.__name__)
+ self._logger = get_logger(self.__class__.__name__, 'service')
@abstractmethod
async def prepare_content(self, email: T) -> str:
diff --git a/backend/app/services/summarization/prompts.py b/backend/app/services/summarization/prompts.py
index dc6218af..93ac8efd 100644
--- a/backend/app/services/summarization/prompts.py
+++ b/backend/app/services/summarization/prompts.py
@@ -1,9 +1,17 @@
-# summarization/providers/prompts.py
+"""
+Prompt management for email summarization.
+
+This module provides abstract base classes and concrete implementations for managing
+prompts across different LLM providers.
+"""
+
+# Standard library imports
from abc import ABC, abstractmethod
from typing import Optional, Protocol, Dict, Any, runtime_checkable
from dataclasses import dataclass, field
from enum import Enum
+# Internal imports
from app.utils.config import PromptVersion
@dataclass
@@ -95,15 +103,9 @@ def get_response_format(self, version: Optional[PromptVersion] = None) -> Dict[s
# Core prompt templates
EMAIL_SUMMARY_SYSTEM_PROMPT = PromptTemplate(
version=PromptVersion.V2,
- template="""You are a precise email summarizer that produces JSON output. Your task is to:
+ template="""You are a precise email summarizer. Your task is to:
1. Create a concise, factual single-sentence summary capturing the key message or request
-2. Extract 3-5 key topics or themes as keywords
-
-Return your analysis in JSON format with the following structure:
-{
- "summary": "The concise summary sentence",
- "keywords": ["keyword1", "keyword2", "keyword3"]
-}""",
+2. Extract 3-5 key topics or themes as keywords""",
metadata={
"description": "System prompt for email summarization with JSON output",
"response_format": {"type": "json_object"},
@@ -116,12 +118,10 @@ def get_response_format(self, version: Optional[PromptVersion] = None) -> Dict[s
EMAIL_SUMMARY_USER_PROMPT = PromptTemplate(
version=PromptVersion.V2,
- template="""Please analyze this email and provide the summary and keywords in JSON format.
+ template="""Please analyze this email and provide a summary and keywords.
Email Content:
-{content}
-
-Remember to format your response as JSON with 'summary' and 'keywords' fields.""",
+{content}""",
metadata={
"description": "User prompt for email summarization with JSON format specification",
"variables": ["content"]
diff --git a/backend/app/services/summarization/providers/google/google.py b/backend/app/services/summarization/providers/google/google.py
index 39c8beb6..9ec33c58 100644
--- a/backend/app/services/summarization/providers/google/google.py
+++ b/backend/app/services/summarization/providers/google/google.py
@@ -1,6 +1,9 @@
+# Standard library imports
from typing import Dict, TypeVar, List, Optional
from datetime import datetime, timezone
import json
+
+# Third-party imports
from google import genai
from google.genai import types
from tenacity import (
@@ -9,13 +12,13 @@
wait_exponential,
retry_if_exception_type
)
-# internal
-from app.services.summarization.prompts import PromptManager
-from app.utils.config import ProviderModel, SummarizerProvider
-from app.services.summarization.providers.openai.openai import OpenAIBackend, OpenAIEmailSummarizer
+
+# Internal imports
from app.models import SummarySchema
+from app.services.summarization.prompts import PromptManager, PromptVersion
+from app.services.summarization.providers.openai.openai import OpenAIBackend, OpenAIEmailSummarizer
from app.services.summarization.types import ModelBackend, ModelConfig
-from app.services.summarization.prompts import PromptVersion
+from app.utils.config import ProviderModel, SummarizerProvider
from .prompts import GeminiPromptManager
diff --git a/backend/app/services/summarization/providers/google/prompts.py b/backend/app/services/summarization/providers/google/prompts.py
index 7b7cf8f8..adfa6af4 100644
--- a/backend/app/services/summarization/providers/google/prompts.py
+++ b/backend/app/services/summarization/providers/google/prompts.py
@@ -1,12 +1,15 @@
-from dataclasses import dataclass, field
+# Standard library imports
+from dataclasses import dataclass
from typing import Any, Dict, Optional
+
+# Internal imports
from app.services.summarization.prompts import(
PromptManager,
+ PromptTemplate,
EMAIL_SUMMARY_SYSTEM_PROMPT,
EMAIL_SUMMARY_USER_PROMPT
)
from app.utils.config import PromptVersion
-from app.services.summarization.prompts import PromptTemplate
@dataclass
class GeminiPromptManager(PromptManager):
diff --git a/backend/app/services/summarization/providers/openai/openai.py b/backend/app/services/summarization/providers/openai/openai.py
index 9d576d15..3ea86553 100644
--- a/backend/app/services/summarization/providers/openai/openai.py
+++ b/backend/app/services/summarization/providers/openai/openai.py
@@ -1,7 +1,10 @@
+# Standard library imports
from typing import List, Optional, Dict, TypeVar
from datetime import datetime, timezone
import asyncio
import json
+
+# Third-party imports
from openai import (
RateLimitError,
APITimeoutError,
@@ -14,14 +17,14 @@
wait_exponential,
retry_if_exception_type
)
-# internal
-from app.services.summarization.base import AdaptiveSummarizer
-from app.services.summarization.types import ModelBackend, ModelConfig
+
+# Internal imports
from app.models import EmailSchema, SummarySchema
-from app.utils.config import ProviderModel, SummarizerProvider
+from app.services.summarization.base import AdaptiveSummarizer
from app.services.summarization.prompts import PromptManager
+from app.services.summarization.types import ModelBackend, ModelConfig
+from app.utils.config import ProviderModel, SummarizerProvider, PromptVersion
from .prompts import OpenAIPromptManager
-from app.utils.config import PromptVersion
T = TypeVar('T')
diff --git a/backend/app/services/summarization/providers/openai/prompts.py b/backend/app/services/summarization/providers/openai/prompts.py
index 85ab0429..85872ad9 100644
--- a/backend/app/services/summarization/providers/openai/prompts.py
+++ b/backend/app/services/summarization/providers/openai/prompts.py
@@ -1,5 +1,8 @@
-from dataclasses import dataclass, field
+# Standard library imports
+from dataclasses import dataclass
from typing import Any, Dict, Optional
+
+# Internal imports
from app.services.summarization.prompts import(
PromptManager,
EMAIL_SUMMARY_SYSTEM_PROMPT,
diff --git a/backend/app/services/summarization/providers/openrouter/openrouter.py b/backend/app/services/summarization/providers/openrouter/openrouter.py
new file mode 100644
index 00000000..13feabff
--- /dev/null
+++ b/backend/app/services/summarization/providers/openrouter/openrouter.py
@@ -0,0 +1,184 @@
+# Core
+from typing import List, Optional, Dict, TypeVar
+from datetime import datetime, timezone
+import asyncio
+import json
+import sys
+
+from openai import AsyncOpenAI, RateLimitError, APITimeoutError, APIError
+from tenacity import (
+ retry,
+ stop_after_attempt,
+ wait_exponential,
+ retry_if_exception_type
+)
+# internal
+from app.services.summarization.base import AdaptiveSummarizer
+from app.services.summarization.types import ModelBackend, ModelConfig
+from app.models import EmailSchema, SummarySchema
+from app.utils.config import ProviderModel, SummarizerProvider
+from app.services.summarization.prompts import PromptManager
+from .prompts import OpenRouterPromptManager
+from app.utils.config import PromptVersion
+from app.utils.helpers import get_logger
+
+class OpenRouterBackend(ModelBackend):
+ """OpenRouter implementation that delegates model routing to the provider."""
+
+ def __init__(
+ self,
+ api_key: str,
+ prompt_manager: PromptManager,
+ temperature: float = 0.3,
+ max_tokens: int = 150,
+ ):
+ self.logger = get_logger(self.__class__.__name__, 'service')
+ self.client = AsyncOpenAI(
+ base_url="https://openrouter.ai/api/v1",
+ api_key=api_key
+ )
+ self.prompt_manager = prompt_manager
+ self.temperature = temperature
+ self.max_tokens = max_tokens
+
+ @retry(
+ retry=retry_if_exception_type((
+ RateLimitError,
+ APITimeoutError,
+ APIError,
+ )),
+ wait=wait_exponential(multiplier=1, min=4, max=10),
+ stop=stop_after_attempt(3)
+ )
+ async def generate_summary(
+ self,
+ content: str,
+ config: Optional[ModelConfig] = None
+ ) -> tuple[str, List[str]]:
+ """Generate a summary, letting OpenRouter handle model selection."""
+ cfg = config or {}
+
+ messages = [
+ {
+ "role": "system",
+ "content": self.prompt_manager.get_system_prompt()
+ },
+ {
+ "role": "user",
+ "content": self.prompt_manager.get_user_prompt(content)
+ }
+ ]
+
+ # Log the request payload for diagnostics
+ request_payload = {
+ "messages": "<redacted: may contain full email content>",
+ "temperature": cfg.get("temperature", self.temperature),
+ "max_tokens": cfg.get("max_tokens", self.max_tokens),
+ "models": ProviderModel.get_openrouter_fallbacks(),
+ "route": "fallback"
+ }
+ self.logger.info(f"Sending request to OpenRouter: {json.dumps(request_payload, indent=2)}")
+
+ try:
+ # Let OpenRouter select the model from the provided list
+ response = await self.client.chat.completions.create(
+ model=ProviderModel.get_openrouter_fallbacks()[0], # Required by SDK, OR will use list below
+ messages=messages,
+ temperature=cfg.get("temperature", self.temperature),
+ max_tokens=cfg.get("max_tokens", self.max_tokens),
+ response_format=self.prompt_manager.get_response_format(),
+ extra_body={
+ "models": ProviderModel.get_openrouter_fallbacks(),
+ "route": "fallback" # Ensures it tries models in order
+ }
+ )
+ except APIError as e:
+ self.logger.error(f"OpenRouter API request failed with status code {e.status_code}.")
+ if e.body:
+ self.logger.error(f"Error response body: {e.body}")
+ raise # Re-raise the exception to be handled by the retry decorator or calling service
+
+ # Parse the response
+ try:
+ result = json.loads(response.choices[0].message.content)
+ return result.get("summary", ""), result.get("keywords", [])
+ except (json.JSONDecodeError, KeyError, IndexError) as e:
+ # Fallback handling if JSON parsing fails
+ self.logger.warning(f"Failed to parse JSON. Response: {response.choices[0].message.content if response.choices else 'empty'}. Error: {e}")
+ summary = response.choices[0].message.content
+ return summary.strip(), []
+
+ async def batch_generate_summaries(
+ self,
+ contents: List[str],
+ config: Optional[ModelConfig] = None
+ ) -> List[tuple[str, List[str]]]:
+ """Generate summaries for multiple emails."""
+ # Implement concurrent processing with rate limiting
+ semaphore = asyncio.Semaphore(5) # Limit concurrent API calls
+
+ async def _process_with_semaphore(content: str) -> tuple[str, List[str]]:
+ async with semaphore:
+ return await self.generate_summary(content, config)
+
+ return await asyncio.gather(
+ *[_process_with_semaphore(content) for content in contents]
+ )
+
+ @property
+ def model_info(self) -> Dict[str, str]:
+ return {
+ "provider": "OpenRouter",
+ "model": "auto" # Model is selected by OpenRouter
+ }
+
+class OpenRouterEmailSummarizer(AdaptiveSummarizer[EmailSchema]):
+ """Email summarizer implementation using OpenRouter's API."""
+
+ def __init__(
+ self,
+ api_key: str,
+ batch_threshold: int = 10,
+ max_batch_size: int = 50,
+ timeout: float = 30.0,
+ prompt_version: PromptVersion = PromptVersion.latest(),
+ ):
+ prompt_manager = OpenRouterPromptManager(prompt_version=prompt_version)
+ backend = OpenRouterBackend(
+ api_key=api_key,
+ prompt_manager=prompt_manager,
+ )
+ super().__init__(
+ model_backend=backend,
+ prompt_manager=prompt_manager,
+ batch_threshold=batch_threshold,
+ max_batch_size=max_batch_size,
+ timeout=timeout
+ )
+
+ async def prepare_content(self, email: EmailSchema) -> str:
+ """Transform EmailSchema into processable content."""
+ return (
+ f"From: {email.sender}\n"
+ f"To: {', '.join(email.recipients)}\n"
+ f"Subject: {email.subject}\n"
+ f"Date: {email.received_at}\n\n"
+ f"Body:\n{email.body}"
+ )
+
+ def create_summary(
+ self,
+ email_id: str,
+ summary_text: str,
+ keywords: List[str],
+ google_id: str
+ ) -> SummarySchema:
+ """Create a SummarySchema from processing results."""
+ return SummarySchema(
+ email_id=email_id,
+ summary_text=summary_text,
+ keywords=keywords,
+ generated_at=datetime.now(timezone.utc),
+ model_info=self._backend.model_info,
+ google_id=google_id
+ )
\ No newline at end of file
diff --git a/backend/app/services/summarization/providers/openrouter/prompts.py b/backend/app/services/summarization/providers/openrouter/prompts.py
new file mode 100644
index 00000000..8f28ec4f
--- /dev/null
+++ b/backend/app/services/summarization/providers/openrouter/prompts.py
@@ -0,0 +1,69 @@
+from dataclasses import dataclass, field
+from typing import Any, Dict, List, Optional
+from app.services.summarization.prompts import(
+ PromptTemplate,
+ EMAIL_SUMMARY_USER_PROMPT,
+ PromptManager,
+ EMAIL_SUMMARY_SYSTEM_PROMPT
+)
+
+from app.utils.config import PromptVersion
+
+# New OpenRouter-specific system prompt
+OPENROUTER_SYSTEM_PROMPT_TEMPLATE = """You are a precise email summarizer. Your task is to analyze the user's email and provide a response in valid JSON format.
+
+You must respond with a JSON object containing exactly these fields:
+- "summary": A concise, factual single-sentence summary capturing the key message or request
+- "keywords": An array of 3-5 key topics or themes extracted from the email
+
+Example response format:
+{
+ "summary": "The sender is requesting a meeting to discuss the quarterly budget review.",
+ "keywords": ["meeting", "quarterly", "budget", "review", "discussion"]
+}
+
+IMPORTANT: Respond only with a valid JSON object. Do not add any explanatory text or wrap the JSON in markdown code blocks."""
+
+EMAIL_SUMMARY_OPENROUTER_SYSTEM_PROMPT = PromptTemplate(
+ version=PromptVersion.V1,
+ template=OPENROUTER_SYSTEM_PROMPT_TEMPLATE,
+ metadata={}
+)
+
+@dataclass
+class OpenRouterPromptTemplate(PromptTemplate):
+ """
+ OpenRouter-specific prompt template with basic JSON response format.
+ """
+
+ def get_response_format(self) -> Dict[str, Any]:
+ """
+ Return the basic JSON response format supported by most OpenRouter models.
+ """
+ return {
+ "type": "json_object"
+ }
+
+# Simplified prompt template
+EMAIL_SUMMARY_OPENROUTER_PROMPT = OpenRouterPromptTemplate(
+ version=PromptVersion.V1,
+ template=EMAIL_SUMMARY_OPENROUTER_SYSTEM_PROMPT.template,
+ metadata={}
+)
+
+@dataclass
+class OpenRouterPromptManager(PromptManager):
+ """OpenRouter-specific prompt management, mirroring OpenAI's structure."""
+ prompt_version: PromptVersion = PromptVersion.latest()
+
+ def get_system_prompt(self, version: Optional[PromptVersion] = None) -> str:
+ return OPENROUTER_SYSTEM_PROMPT_TEMPLATE
+
+ def get_user_prompt(self, content: str, version: Optional[PromptVersion] = None) -> str:
+ return EMAIL_SUMMARY_USER_PROMPT.template.format(content=content)
+
+ def get_response_format(self, version: Optional[PromptVersion] = None) -> Dict[str, Any]:
+ """
+ Return the basic JSON response format supported by most OpenRouter models.
+ """
+ return {"type": "json_object"}
\ No newline at end of file
diff --git a/backend/app/services/summarization/summary_service.py b/backend/app/services/summarization/summary_service.py
index 10654c5a..d28a610e 100644
--- a/backend/app/services/summarization/summary_service.py
+++ b/backend/app/services/summarization/summary_service.py
@@ -2,13 +2,16 @@
Service for handling email summarization operations.
"""
-import logging
+# Standard library imports
from typing import List, Optional, Dict, Any
from datetime import datetime, timezone, timedelta
-from fastapi import HTTPException, status
-from fastapi import Depends
+
+# Third-party imports
+from fastapi import HTTPException, status, Depends
from bson import ObjectId
+# Internal imports
+from app.utils.helpers import get_logger, log_operation, standardize_error_response
from app.models import EmailSchema, SummarySchema
from app.services.database.repositories.summary_repository import SummaryRepository
from app.services.database.factories import get_summary_repository, get_email_service
@@ -19,15 +22,11 @@
GeminiEmailSummarizer
)
-# Configure logging with format and level
-logging.basicConfig(
- level=logging.DEBUG,
- format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
- datefmt='%Y-%m-%d %H:%M:%S'
-)
+# -------------------------------------------------------------------------
+# Configuration
+# -------------------------------------------------------------------------
-# Create module-specific logger
-logger = logging.getLogger(__name__)
+logger = get_logger(__name__, 'service')
class SummaryService:
"""
@@ -61,10 +60,9 @@ async def initialize(self):
await self.summary_repository.create_index("generated_at") # For time-based queries
await self.summary_repository.create_index("google_id") # For user-specific summaries
- logger.info("Summary collection indexes initialized")
+ log_operation(logger, 'info', "Summary collection indexes initialized")
except Exception as e:
- logger.error(f"Failed to initialize summary indexes: {e}")
- raise
+ raise standardize_error_response(e, "initialize summary indexes")
async def save_summary(self, summary: SummarySchema, google_id: str) -> str:
"""
@@ -101,12 +99,11 @@ async def save_summary(self, summary: SummarySchema, google_id: str) -> str:
if not result:
raise Exception("Failed to save summary")
- logger.debug(f"Summary saved for email {summary.email_id} for user {google_id}")
+ log_operation(logger, 'debug', f"Summary saved for email {summary.email_id} for user {google_id}")
return summary.email_id
except Exception as e:
- logger.error(f"Failed to save summary: {e}")
- raise HTTPException(status_code=500, detail=str(e))
+ raise standardize_error_response(e, "save summary", summary.email_id)
async def get_summary(self, email_id: str, google_id: str) -> Optional[SummarySchema]:
"""
@@ -132,8 +129,7 @@ async def get_summary(self, email_id: str, google_id: str) -> Optional[SummarySc
# Otherwise create a new SummarySchema instance
return SummarySchema(**result)
except Exception as e:
- logger.error(f"Failed to retrieve summary for email {email_id}: {e}")
- raise HTTPException(status_code=500, detail=str(e))
+ raise standardize_error_response(e, "get summary", email_id)
async def get_summaries(
self,
@@ -177,8 +173,7 @@ async def get_summaries(
# Convert results to SummarySchema objects if needed
return [result if isinstance(result, SummarySchema) else SummarySchema(**result) for result in results]
except Exception as e:
- logger.error(f"Failed to retrieve summaries: {e}")
- raise HTTPException(status_code=500, detail=str(e))
+ raise standardize_error_response(e, "get summaries")
async def search_by_keywords(
self,
@@ -208,8 +203,7 @@ async def search_by_keywords(
results = await self.summary_repository.find_many(query, limit=limit)
return [result if isinstance(result, SummarySchema) else SummarySchema(**result) for result in results]
except Exception as e:
- logger.error(f"Failed to search summaries by keywords: {e}")
- raise HTTPException(status_code=500, detail=str(e))
+ raise standardize_error_response(e, "search summaries by keywords")
async def get_recent_summaries(
self,
@@ -246,7 +240,7 @@ async def get_recent_summaries(
if google_id:
query["google_id"] = google_id
- logger.debug(f"Querying summaries between {cutoff_date.isoformat()} and {now.isoformat()}")
+ log_operation(logger, 'debug', f"Querying summaries between {cutoff_date.isoformat()} and {now.isoformat()}")
results = await self.summary_repository.find_many(
query,
@@ -254,11 +248,10 @@ async def get_recent_summaries(
sort=[("generated_at", -1)]
)
- logger.debug(f"Found {len(results)} summaries matching query")
+ log_operation(logger, 'debug', f"Found {len(results)} summaries matching query")
return [result if isinstance(result, SummarySchema) else SummarySchema(**result) for result in results]
except Exception as e:
- logger.error(f"Failed to retrieve recent summaries: {e}", exc_info=True)
- raise HTTPException(status_code=500, detail=str(e))
+ raise standardize_error_response(e, "get recent summaries")
async def delete_summary(self, email_id: str, google_id: str) -> bool:
"""
@@ -279,14 +272,39 @@ async def delete_summary(self, email_id: str, google_id: str) -> bool:
deleted = result.deleted_count > 0
if deleted:
- logger.info(f"Summary for email {email_id} deleted for user {google_id}")
+ log_operation(logger, 'info', f"Summary for email {email_id} deleted for user {google_id}")
+ else:
+ log_operation(logger, 'info', f"No summary found for email {email_id} for user {google_id} to delete")
+
+ return deleted
+ except Exception as e:
+ raise standardize_error_response(e, "delete summary", email_id)
+
+ async def delete_summaries_by_google_id(self, google_id: str) -> bool:
+ """
+ Delete all summaries for a specific Google user ID.
+
+ Args:
+ google_id: Google ID of the user whose summaries to delete
+
+ Returns:
+ bool: True if any summaries were deleted, False otherwise
+
+ Raises:
+ Exception: If database operation fails
+ """
+ try:
+ result = await self.summary_repository.delete_by_google_id(google_id)
+ deleted = result.deleted_count > 0
+
+ if deleted:
+ log_operation(logger, 'info', f"All summaries deleted for user {google_id}")
else:
- logger.info(f"No summary found for email {email_id} for user {google_id} to delete")
+ log_operation(logger, 'info', f"No summaries found for user {google_id} to delete")
return deleted
except Exception as e:
- logger.error(f"Failed to delete summary for email {email_id}: {e}")
- raise HTTPException(status_code=500, detail=str(e))
+ raise standardize_error_response(e, "delete summaries by google id", google_id=google_id)
async def save_summaries_batch(self, summaries: List[SummarySchema], google_id: str) -> Dict[str, int]:
"""
@@ -326,14 +344,13 @@ async def save_summaries_batch(self, summaries: List[SummarySchema], google_id:
"inserted": result.upserted_count,
"modified": result.modified_count
}
- logger.info(f"Batch summary save: {result.upserted_count} inserted, "
+ log_operation(logger, 'info', f"Batch summary save: {result.upserted_count} inserted, "
f"{result.modified_count} modified for user {google_id}")
return stats
return {"inserted": 0, "modified": 0}
except Exception as e:
- logger.error(f"Error in batch saving summaries: {e}")
- raise HTTPException(status_code=500, detail=str(e))
+ raise standardize_error_response(e, "save summaries batch")
async def count_summaries(self, query: Dict = None, google_id: str = None) -> int:
"""
@@ -355,8 +372,7 @@ async def count_summaries(self, query: Dict = None, google_id: str = None) -> in
query["google_id"] = google_id
return await self.summary_repository.count_documents(query)
except Exception as e:
- logger.error(f"Failed to count summaries: {e}")
- raise HTTPException(status_code=500, detail=str(e))
+ raise standardize_error_response(e, "count summaries")
async def get_summaries_by_ids(self, email_ids: List[str], google_id: str) -> List[SummarySchema]:
"""
@@ -389,12 +405,11 @@ async def get_summaries_by_ids(self, email_ids: List[str], google_id: str) -> Li
summaries.append(SummarySchema(**doc))
# Log how many were found
- logger.debug(f"Found {len(summaries)} summaries out of {len(email_ids)} requested for user {google_id}")
+ log_operation(logger, 'debug', f"Found {len(summaries)} summaries out of {len(email_ids)} requested for user {google_id}")
return summaries
except Exception as e:
- logger.error(f"Failed to retrieve summaries by IDs: {e}")
- raise HTTPException(status_code=500, detail=str(e))
+ raise standardize_error_response(e, "get summaries by ids")
async def get_or_create_summary(
self,
@@ -422,7 +437,7 @@ async def get_or_create_summary(
# Get email data
email = await self.email_service.get_email(email_id, google_id)
if not email:
- logger.warning(f"Email {email_id} not found for user {google_id}")
+ log_operation(logger, 'warning', f"Email {email_id} not found for user {google_id}")
return None
# Generate summary using EmailSchema directly
@@ -432,7 +447,7 @@ async def get_or_create_summary(
)
if not summaries:
- logger.warning(f"Failed to generate summary for email {email_id}")
+ log_operation(logger, 'warning', f"Failed to generate summary for email {email_id}")
return None
# Create a new SummarySchema with the google_id
@@ -442,13 +457,12 @@ async def get_or_create_summary(
# Store summary
await self.save_summary(summary, google_id)
- logger.info(f"Created new summary for email {email_id}")
+ log_operation(logger, 'info', f"Created new summary for email {email_id}")
return summary.model_dump()
except Exception as e:
- logger.error(f"Failed to get or create summary for email {email_id}: {e}")
- raise HTTPException(status_code=500, detail=str(e))
+ raise standardize_error_response(e, "get or create summary", email_id)
async def get_or_create_summaries_batch(
self,
@@ -509,14 +523,15 @@ async def get_or_create_summaries_batch(
missing_emails.append(email)
else:
failed_emails.append(email_id)
- logger.warning(f"Email {email_id} not found for user {google_id}")
+ log_operation(logger, 'warning', f"Email {email_id} not found for user {google_id}")
except Exception as e:
failed_emails.append(email_id)
- logger.warning(f"Error fetching email {email_id}: {e}")
+ log_operation(logger, 'warning', f"Error fetching email {email_id}: {e}")
if missing_emails:
# Generate summaries for missing emails
try:
+ log_operation(logger, 'info', f"Attempting to generate summaries for {len(missing_emails)} emails.")
new_summaries = await summarizer.summarize(
missing_emails,
strategy=ProcessingStrategy.ADAPTIVE
@@ -527,7 +542,7 @@ async def get_or_create_summaries_batch(
await self.save_summaries_batch(new_summaries, google_id)
all_summaries.extend(new_summaries)
except Exception as e:
- logger.error(f"Failed to generate summaries for batch: {e}")
+ log_operation(logger, 'error', f"Failed to generate summaries for batch: {e}")
all_failed_summaries.extend([email.email_id for email in missing_emails])
continue
@@ -537,13 +552,13 @@ async def get_or_create_summaries_batch(
all_summaries.extend(existing_summaries)
except Exception as e:
- logger.error(f"Error processing batch {i//batch_size + 1}: {e}")
+ log_operation(logger, 'error', f"Error processing batch {i//batch_size + 1}: {e}")
# Mark all emails in this batch as failed
all_failed_summaries.extend(batch_ids)
continue
# Log final results
- logger.info(
+ log_operation(logger, 'info',
f"Batch summary results for user {google_id}: "
f"{len(all_summaries)} successful, {len(all_missing_emails)} missing emails, "
f"{len(all_failed_summaries)} failed summaries"
@@ -556,8 +571,4 @@ async def get_or_create_summaries_batch(
}
except Exception as e:
- logger.error(f"Failed to process batch summaries: {e}")
- raise HTTPException(
- status_code=500,
- detail=f"Failed to process summaries: {str(e)}"
- )
\ No newline at end of file
+ raise standardize_error_response(e, "process batch summaries")
\ No newline at end of file
diff --git a/backend/app/services/summarization/types.py b/backend/app/services/summarization/types.py
index 13606182..ddd2e0db 100644
--- a/backend/app/services/summarization/types.py
+++ b/backend/app/services/summarization/types.py
@@ -1,9 +1,12 @@
+# Standard library imports
from typing import Protocol, List, Optional, Dict
from enum import Enum, auto
-from pydantic import Field
from dataclasses import dataclass
from datetime import datetime, timezone
+# Third-party imports
+from pydantic import Field
+
ModelConfig = Dict[str, any]
class ProcessingStrategy(Enum):
diff --git a/backend/app/services/user_service.py b/backend/app/services/user_service.py
index dc47c51f..64f4b1c4 100644
--- a/backend/app/services/user_service.py
+++ b/backend/app/services/user_service.py
@@ -2,24 +2,22 @@
User service for handling user-related operations.
"""
-import logging
+# Standard library imports
from typing import Optional, Dict, Any
+
+# Third-party imports
from fastapi import HTTPException, status
-from bson import ObjectId
-from google.oauth2.credentials import Credentials
-# Import from app modules
-from app.models import UserSchema, TokenData, PreferencesSchema
-from app.services.database import UserRepository, get_user_repository
+# Internal imports
+from app.utils.helpers import get_logger, log_operation, standardize_error_response
+from app.models import UserSchema, PreferencesSchema
+from app.services.database import get_user_repository, UserRepository
-# Configure logging
-logging.basicConfig(
- level=logging.DEBUG,
- format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
- datefmt='%Y-%m-%d %H:%M:%S'
-)
+# -------------------------------------------------------------------------
+# Configuration
+# -------------------------------------------------------------------------
-logger = logging.getLogger(__name__)
+logger = get_logger(__name__, 'service')
class UserService:
"""
@@ -31,16 +29,14 @@ class UserService:
- Managing user authentication state
"""
- def __init__(self, user_repository: UserRepository):
+ def __init__(self, user_repository: UserRepository = None):
"""
Initialize the user service.
Args:
user_repository: User repository instance
"""
- self.user_repository = user_repository
- # Note: We can't call ensure_indexes here because it's async
- # The indexes will be created on first use
+ self.user_repository = user_repository or get_user_repository()
async def get_user(self, user_id: str) -> Optional[UserSchema]:
"""
@@ -55,11 +51,7 @@ async def get_user(self, user_id: str) -> Optional[UserSchema]:
try:
return await self.user_repository.find_by_id(user_id)
except Exception as e:
- logger.error(f"Failed to get user: {e}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to get user"
- )
+ raise standardize_error_response(e, "get user", user_id)
async def get_user_by_email(self, email: str) -> Optional[UserSchema]:
"""
@@ -74,11 +66,7 @@ async def get_user_by_email(self, email: str) -> Optional[UserSchema]:
try:
return await self.user_repository.find_by_email(email)
except Exception as e:
- logger.error(f"Failed to get user by email: {e}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to get user by email"
- )
+ raise standardize_error_response(e, "get user by email", email)
async def create_user(self, user_data: Dict[str, Any]) -> UserSchema:
"""
@@ -90,7 +78,6 @@ async def create_user(self, user_data: Dict[str, Any]) -> UserSchema:
Returns:
Created user
"""
-
try:
# Create a complete user data dictionary with all required fields
complete_user_data = {
@@ -118,20 +105,17 @@ async def create_user(self, user_data: Dict[str, Any]) -> UserSchema:
user_id = await self.user_repository.insert_one(user)
user._id = user_id
+ log_operation(logger, 'info', f"Created user: {user.email}")
return user
except Exception as e:
- logger.error(f"Failed to create user: {e}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to create user"
- )
+ raise standardize_error_response(e, "create user", user_data.get("email"))
- async def update_user(self, user_id: str, user_data: Dict[str, Any]) -> Optional[UserSchema]:
+ async def update_user(self, google_id: str, user_data: Dict[str, Any]) -> Optional[UserSchema]:
"""
Update a user.
Args:
- user_id: User ID
+ google_id: User Google ID
user_data: Updated user data
Returns:
@@ -139,49 +123,45 @@ async def update_user(self, user_id: str, user_data: Dict[str, Any]) -> Optional
"""
try:
# First get the current user to ensure it exists
- current_user = await self.user_repository.find_by_id(user_id)
+ current_user = await self.user_repository.find_by_id(google_id)
if not current_user:
- logger.error(f"User not found: {user_id}")
+ log_operation(logger, 'warning', f"User not found: {google_id}")
return None
# Update the user
- success = await self.user_repository.update_one(user_id, user_data)
+ success = await self.user_repository.update_one(google_id, user_data)
if not success:
- logger.error(f"Update failed for user: {user_id}")
+ log_operation(logger, 'warning', f"Update failed for user: {google_id}")
return None
# Get the updated user
- updated_user = await self.user_repository.find_by_id(user_id)
+ updated_user = await self.user_repository.find_by_id(google_id)
if not updated_user:
- logger.error(f"Failed to fetch updated user: {user_id}")
+ log_operation(logger, 'warning', f"Failed to fetch updated user: {google_id}")
return None
+ log_operation(logger, 'info', f"Updated user: {google_id}")
return UserSchema(**updated_user)
except Exception as e:
- logger.error(f"Failed to update user: {e}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to update user"
- )
+ raise standardize_error_response(e, "update user", google_id)
- async def delete_user(self, user_id: str) -> bool:
+ async def delete_user(self, google_id: str) -> bool:
"""
Delete a user.
Args:
- user_id: User ID
+ google_id: User Google ID
Returns:
True if deleted, False otherwise
"""
try:
- return await self.user_repository.delete_one(user_id)
+ result = await self.user_repository.delete_by_google_id(google_id)
+ if result:
+ log_operation(logger, 'info', f"Deleted user: {google_id}")
+ return result
except Exception as e:
- logger.error(f"Failed to delete user: {e}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to delete user"
- )
+ raise standardize_error_response(e, "delete user", google_id)
async def get_preferences(self, google_id: str) -> Dict[str, Any]:
"""
@@ -194,15 +174,11 @@ async def get_preferences(self, google_id: str) -> Dict[str, Any]:
Dict[str, Any]: User preferences
"""
try:
- logger.debug(f"Fetching preferences for Google ID: {google_id}")
+ log_operation(logger, 'debug', f"Fetching preferences for Google ID: {google_id}")
user = await self.user_repository.find_one({"google_id": google_id})
return user.get("preferences", {}) if user else {}
except Exception as e:
- logger.error(f"Failed to get preferences: {e}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to get preferences"
- )
+ raise standardize_error_response(e, "get preferences", google_id)
async def update_preferences(self, google_id: str, preferences: Dict[str, Any]) -> bool:
"""
@@ -216,15 +192,13 @@ async def update_preferences(self, google_id: str, preferences: Dict[str, Any])
bool: True if update successful
"""
try:
- logger.debug(f"Updating preferences for Google ID: {google_id}")
+ log_operation(logger, 'debug', f"Updating preferences for Google ID: {google_id}")
result = await self.user_repository.update_one(
- {"google_id": google_id},
- {"$set": {"preferences": preferences}}
- )
+ {"google_id": google_id},
+ {"$set": {"preferences": preferences}}
+ )
+ if result:
+ log_operation(logger, 'info', f"Updated preferences for user: {google_id}")
return result
except Exception as e:
- logger.error(f"Failed to update preferences: {e}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to update preferences"
- )
+ raise standardize_error_response(e, "update preferences", google_id)
diff --git a/backend/app/tests/conftest.py b/backend/app/tests/conftest.py
index cf6cc4b1..fa27756b 100644
--- a/backend/app/tests/conftest.py
+++ b/backend/app/tests/conftest.py
@@ -89,5 +89,6 @@ def test_settings():
"OPENAI_API_KEY": os.getenv("OPENAI_API_KEY", "test_key_123"),
"GOOGLE_CLIENT_ID": os.getenv("GOOGLE_CLIENT_ID", "test_client_id"),
"GOOGLE_CLIENT_SECRET": os.getenv("GOOGLE_CLIENT_SECRET", "test_client_secret"),
- "MONGO_URI": os.getenv("MONGO_URI", "mongodb://localhost:27017/test_db")
+ "MONGO_URI": os.getenv("MONGO_URI", "mongodb://localhost:27017/test_db"),
+ "OPENROUTER_API_KEY": os.getenv("OPENROUTER_API_KEY", "sk-or-test-key-123456789")
}
\ No newline at end of file
diff --git a/backend/app/tests/test_config.py b/backend/app/tests/test_config.py
index f8d93817..9e08dba8 100644
--- a/backend/app/tests/test_config.py
+++ b/backend/app/tests/test_config.py
@@ -16,6 +16,7 @@ class MockSettings(Settings):
google_client_secret: str = os.getenv("GOOGLE_CLIENT_SECRET", "test-client-secret")
email_account: str = os.getenv("EMAIL_ACCOUNT", "test@example.com")
mongo_uri: str = os.getenv("MONGO_URI", "mongodb://localhost:27017/test_db")
+ openrouter_api_key: str = os.getenv("OPENROUTER_API_KEY", "sk-or-test-key-123456789")
openai_api_key: str = os.getenv("OPENAI_API_KEY", "sk-test-key-123456789")
deepseek_api_key: str | None = os.getenv("DEEPSEEK_API_KEY")
gemini_api_key: str | None = os.getenv("GEMINI_API_KEY")
diff --git a/backend/app/tests/unit/summary/test_openrouter_provider.py b/backend/app/tests/unit/summary/test_openrouter_provider.py
new file mode 100644
index 00000000..df23185b
--- /dev/null
+++ b/backend/app/tests/unit/summary/test_openrouter_provider.py
@@ -0,0 +1,92 @@
+import pytest
+import asyncio
+from unittest.mock import AsyncMock, MagicMock, patch
+
+from app.models import EmailSchema
+from app.services.summarization.providers.openrouter.openrouter import OpenRouterEmailSummarizer, OpenRouterBackend
+from app.services.summarization.providers.openrouter.prompts import OpenRouterPromptManager
+from app.utils.config import PromptVersion
+
+@pytest.fixture
+def email_schema_fixture():
+ """Fixture for a sample EmailSchema."""
+ return EmailSchema(
+ email_id="test_email_id",
+ google_id="test_google_id",
+ sender="sender@example.com",
+ recipients=["recipient@example.com"],
+ subject="Test Subject",
+ body="This is a test email body.",
+ received_at="2023-01-01T00:00:00Z"
+ )
+
+@pytest.fixture
+def summarizer():
+ """Fixture for OpenRouterEmailSummarizer."""
+ return OpenRouterEmailSummarizer(api_key="test_api_key")
+
+@pytest.mark.asyncio
+async def test_summarizer_prepare_content(summarizer: OpenRouterEmailSummarizer, email_schema_fixture: EmailSchema):
+ """Test that prepare_content formats the email correctly."""
+ content = await summarizer.prepare_content(email_schema_fixture)
+ assert "From: sender@example.com" in content
+ assert "Subject: Test Subject" in content
+ assert "Body:\nThis is a test email body." in content
+
+@pytest.mark.asyncio
+@patch('app.services.summarization.providers.openrouter.openrouter.AsyncOpenAI')
+async def test_backend_generate_summary_success(mock_async_openai, email_schema_fixture: EmailSchema):
+ """Test successful summary generation with valid JSON response."""
+ # Mock the OpenAI client
+ mock_client = MagicMock()
+ mock_async_openai.return_value = mock_client
+
+ # Mock the response from chat.completions.create
+ mock_completion = MagicMock()
+ mock_completion.choices = [MagicMock()]
+ mock_completion.choices[0].message.content = '{"summary": "This is a test summary.", "keywords": ["test", "email"]}'
+
+ mock_client.chat.completions.create = AsyncMock(return_value=mock_completion)
+
+ # Setup backend
+ prompt_manager = OpenRouterPromptManager(prompt_version=PromptVersion.V1)
+ backend = OpenRouterBackend(api_key="test_key", prompt_manager=prompt_manager)
+
+ # Run the method
+ summary, keywords = await backend.generate_summary("test content")
+
+ # Assertions
+ assert summary == "This is a test summary."
+ assert keywords == ["test", "email"]
+ mock_client.chat.completions.create.assert_called_once()
+ call_args = mock_client.chat.completions.create.call_args
+ assert call_args.kwargs['model'] is not None
+ assert call_args.kwargs['messages'] is not None
+
+
+@pytest.mark.asyncio
+@patch('app.services.summarization.providers.openrouter.openrouter.AsyncOpenAI')
+async def test_backend_generate_summary_json_error(mock_async_openai):
+ """Test summary generation with a non-JSON response."""
+ # Mock the OpenAI client
+ mock_client = MagicMock()
+ mock_async_openai.return_value = mock_client
+
+ # Mock a non-JSON response
+ mock_completion = MagicMock()
+ mock_completion.choices = [MagicMock()]
+ mock_completion.choices[0].message.content = 'This is just a plain text summary.'
+
+ mock_client.chat.completions.create = AsyncMock(return_value=mock_completion)
+
+ # Setup backend
+ prompt_manager = OpenRouterPromptManager(prompt_version=PromptVersion.V1)
+ backend = OpenRouterBackend(api_key="test_key", prompt_manager=prompt_manager)
+
+ # Run the method
+ summary, keywords = await backend.generate_summary("test content")
+
+ # Assertions for fallback behavior
+ assert summary == "This is just a plain text summary."
+ assert keywords == []
+ mock_client.chat.completions.create.assert_called_once()
\ No newline at end of file
diff --git a/backend/app/utils/config.py b/backend/app/utils/config.py
index ce9297b1..c6ef111f 100644
--- a/backend/app/utils/config.py
+++ b/backend/app/utils/config.py
@@ -2,18 +2,18 @@
from pydantic_settings import BaseSettings
from functools import lru_cache
from enum import Enum
-from typing import Optional
-from pydantic import ConfigDict
+from typing import Optional, List
+from pydantic import ConfigDict, model_validator
class SummarizerProvider(str, Enum):
OPENAI = "openai" # Currently Best option
GOOGLE = "gemini"
- # TODO: Add DeepSeek
+ OPENROUTER = "openrouter"
LOCAL = "local"
@classmethod
def default(cls) -> "SummarizerProvider":
- return cls.OPENAI
+ return cls.OPENROUTER
class ProviderModel(str, Enum):
@@ -24,10 +24,11 @@ class ProviderModel(str, Enum):
# Gemini Models
GEMINI_2_FLASH_LITE = "gemini-2.0-flash-lite-preview-02-05"
GEMINI_2_FLASH = "gemini-2.0-flash-001"
-
- # TODO: Check for updates to flash_lite!
- # DeepSeek Models TODO: UNIMPLEMENTED
+ # OpenRouter models (provider-prefixed model IDs)
+ OR_GPT_4_1_NANO = "openai/gpt-4.1-nano"
+ OR_MINISTRAL_8B = "mistralai/ministral-8b"
+ OR_GEMINI_2_5_FLASH = "google/gemini-2.5-flash-preview"
@classmethod
def default_for_provider(cls, provider: SummarizerProvider) -> "ProviderModel":
@@ -35,12 +36,24 @@ def default_for_provider(cls, provider: SummarizerProvider) -> "ProviderModel":
defaults = {
SummarizerProvider.OPENAI: cls.GPT_4O_MINI,
SummarizerProvider.GOOGLE: cls.GEMINI_2_FLASH_LITE,
- SummarizerProvider.LOCAL: cls.GEMINI_2_FLASH_LITE, # Fallback to OpenAI
+ SummarizerProvider.OPENROUTER: cls.OR_GPT_4_1_NANO,
+ SummarizerProvider.LOCAL: cls.GPT_4O_MINI, # Fallback to OpenAI
}
-
return defaults.get(provider, cls.GPT_4O_MINI)
+ @classmethod
+ def get_openrouter_fallbacks(cls) -> List["ProviderModel"]:
+ """
+ Get ordered list of OpenRouter models for fallback.
+ The API limits the fallback list to a maximum of 3 models.
+ """
+ return [
+ cls.OR_GPT_4_1_NANO,
+ cls.OR_MINISTRAL_8B,
+ cls.OR_GEMINI_2_5_FLASH,
+ ]
+
class PromptVersion(str, Enum):
V1 = "v1"
V2 = "v2"
@@ -69,11 +82,12 @@ class Settings(BaseSettings):
oauth_callback_url: Optional[str] = None
# AI Providers
- openai_api_key: str
- google_api_key: str | None = None
- deepseek_api_key: str | None = None
- gemini_api_key: str | None = None
-
+ openrouter_api_key: Optional[str] = None # Optional here; required by validate_api_keys when provider is OPENROUTER
+ openai_api_key: Optional[str] = None
+ google_api_key: Optional[str] = None
+ deepseek_api_key: Optional[str] = None
+ gemini_api_key: Optional[str] = None
+
# Summarizer settings
summarizer_provider: SummarizerProvider = SummarizerProvider.default()
summarizer_model: ProviderModel = ProviderModel.default_for_provider(summarizer_provider)
@@ -81,6 +95,17 @@ class Settings(BaseSettings):
summarizer_prompt_version: PromptVersion = PromptVersion.latest()
model_config = ConfigDict(env_file=".env", use_enum_values=True)
+
+ @model_validator(mode='after')
+ def validate_api_keys(self) -> 'Settings':
+ """Validate that the required API keys are present for the selected provider."""
+ if self.summarizer_provider == SummarizerProvider.OPENROUTER and not self.openrouter_api_key:
+ raise ValueError("OPENROUTER_API_KEY must be set when using the OpenRouter provider")
+ if self.summarizer_provider == SummarizerProvider.OPENAI and not self.openai_api_key:
+ raise ValueError("OPENAI_API_KEY must be set when using the OpenAI provider")
+ if self.summarizer_provider == SummarizerProvider.GOOGLE and not self.google_api_key:
+ raise ValueError("GOOGLE_API_KEY must be set when using the Google provider")
+ return self
@lru_cache()
def get_settings() -> Settings:
diff --git a/backend/app/utils/helpers.py b/backend/app/utils/helpers.py
new file mode 100644
index 00000000..56cfc69e
--- /dev/null
+++ b/backend/app/utils/helpers.py
@@ -0,0 +1,146 @@
+"""
+Helper functions for Email Essence FastAPI application.
+
+This module contains shared utilities for logging and error handling that can be
+used across the application without creating circular dependencies.
+"""
+
+# Standard library imports
+import logging
+import os
+from typing import Optional
+
+# Third-party imports
+from fastapi import HTTPException, status
+
+
+def _get_logging_level_config() -> dict:
+ """
+ Get logging level configuration based on environment.
+
+ Returns:
+ dict: Mapping of module types to logging levels
+ """
+ environment = os.getenv('ENVIRONMENT', 'production').lower()
+
+ # Logging level configuration by environment
+ config = {
+ 'development': {
+ 'router': logging.DEBUG,
+ 'service': logging.DEBUG,
+ 'default': logging.DEBUG
+ },
+ 'production': {
+ 'router': logging.INFO,
+ 'service': logging.INFO,
+ 'default': logging.INFO
+ },
+ 'testing': {
+ 'router': logging.WARNING,
+ 'service': logging.WARNING,
+ 'default': logging.WARNING
+ }
+ }
+
+ return config.get(environment, config['production'])
+
+
+def get_logger(name: str, module_type: str = 'default') -> logging.Logger:
+ """
+ Get a configured logger instance with appropriate level based on environment.
+
+ Args:
+ name: Logger name (typically __name__)
+ module_type: Type of module ('router', 'service', or 'default')
+
+ Returns:
+ logging.Logger: Configured logger instance
+ """
+ logger = logging.getLogger(name)
+
+ # Get appropriate logging level based on environment and module type
+ level_config = _get_logging_level_config()
+ logging_level = level_config.get(module_type, level_config['default'])
+
+ logger.setLevel(logging_level)
+
+ # Ensure the logger has a handler to output messages to the console
+ if not logger.handlers:
+ handler = logging.StreamHandler()
+ formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+ handler.setFormatter(formatter)
+ logger.addHandler(handler)
+ # Prevent logs from being passed to the root logger to avoid duplicate output
+ logger.propagate = False
+
+ return logger
+
+
+def configure_module_logging(module_name: str, module_type: str = 'default') -> logging.Logger:
+ """
+ Configure logging for a specific module with environment-based levels.
+
+ Args:
+ module_name: Name of the module
+ module_type: Type of module ('router', 'service', or 'default')
+
+ Returns:
+ logging.Logger: Configured logger instance
+ """
+ return get_logger(module_name, module_type)
+
+
+def standardize_error_response(
+ error: Exception,
+ operation: str,
+ resource_id: str = None,
+ user_id: str = None
+) -> HTTPException:
+ """
+ Standardize error responses across the application.
+
+ Args:
+ error: The original exception
+ operation: Description of the operation that failed
+ resource_id: Optional resource identifier
+ user_id: Optional user identifier
+
+ Returns:
+ HTTPException: Standardized HTTP exception
+ """
+ error_msg = f"Failed to {operation}"
+ if resource_id:
+ error_msg += f" resource {resource_id}"
+ if user_id:
+ error_msg += f" for user {user_id}"
+
+ # Log the original error for debugging
+ logger = get_logger(__name__, 'default')
+ logger.exception(f"{error_msg}: {str(error)}")
+
+ return HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=error_msg
+ )
+
+
+def log_operation(
+ logger: logging.Logger,
+ level: str,
+ message: str,
+ **kwargs
+) -> None:
+ """
+ Standardize logging across services.
+
+ Args:
+ logger: Logger instance to use
+ level: Log level (debug, info, warning, error, exception)
+ message: Log message
+ **kwargs: Additional context for logging
+ """
+ log_method = getattr(logger, level.lower())
+ if kwargs:
+ log_method(message, extra=kwargs)
+ else:
+ log_method(message)
\ No newline at end of file
diff --git a/backend/main.py b/backend/main.py
index 8806256f..2c312b9d 100644
--- a/backend/main.py
+++ b/backend/main.py
@@ -1,30 +1,33 @@
# uvicorn main:app --reload
+
+# Standard library imports
import os
-from fastapi import FastAPI, HTTPException, Depends, status
-from fastapi.responses import FileResponse
-from fastapi.middleware.cors import CORSMiddleware
-from starlette.concurrency import run_in_threadpool
from contextlib import asynccontextmanager
import logging
-# Configure logging
-logging.basicConfig(level=logging.INFO)
-# Reduce verbosity of httpx and httpcore
-logging.getLogger("httpx").setLevel(logging.WARNING)
-logging.getLogger("httpcore").setLevel(logging.WARNING)
+# Third-party imports
+from fastapi import FastAPI, HTTPException
+from fastapi.responses import FileResponse
+from fastapi.middleware.cors import CORSMiddleware
+# Internal imports
from app.routers import emails_router, summaries_router, auth_router, user_router
from app.services.database.connection import DatabaseConnection
-from app.models import EmailSchema, SummarySchema, UserSchema
+# -------------------------------------------------------------------------
+# Logging Configuration
+# -------------------------------------------------------------------------
-# from app.models.user_model import User
+# Configure logging
+logging.basicConfig(level=logging.INFO)
+# Reduce verbosity of httpx and httpcore
+logging.getLogger("httpx").setLevel(logging.WARNING)
+logging.getLogger("httpcore").setLevel(logging.WARNING)
+logger = logging.getLogger(__name__)
-@asynccontextmanager
-async def lifespan(app: FastAPI):
- await startup_db_client()
- yield
- await shutdown_db_client()
+# -------------------------------------------------------------------------
+# Database Lifecycle Management
+# -------------------------------------------------------------------------
async def startup_db_client():
"""
@@ -54,6 +57,16 @@ async def shutdown_db_client():
except Exception as e:
raise RuntimeError("Failed to close database connection") from e
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+ await startup_db_client()
+ yield
+ await shutdown_db_client()
+
+# -------------------------------------------------------------------------
+# FastAPI Application Setup
+# -------------------------------------------------------------------------
+
app = FastAPI(
title="Email Essence API",
description="API for the Email Essence application",
@@ -63,6 +76,9 @@ async def shutdown_db_client():
lifespan=lifespan
)
+# -------------------------------------------------------------------------
+# Middleware Configuration
+# -------------------------------------------------------------------------
# Configure CORS
app.add_middleware(
@@ -84,15 +100,10 @@ async def shutdown_db_client():
allow_headers=["*"], # Allows all headers
)
-logger = logging.getLogger(__name__)
+# -------------------------------------------------------------------------
+# API Route Handlers
+# -------------------------------------------------------------------------
-# Register routers
-app.include_router(auth_router, prefix="/auth", tags=["Auth"])
-app.include_router(user_router, prefix="/user", tags=["User"])
-app.include_router(emails_router, prefix="/emails", tags=["Emails"])
-app.include_router(summaries_router, prefix="/summaries", tags=["Summaries"])
-
-# Root route handler
@app.get("/", tags=["Root"])
async def root():
"""
@@ -114,7 +125,7 @@ async def root():
"status": "online"
}
-# Serve favicon.ico from root directory
+# Serve favicon.ico from root directory - only served to swagger UI
@app.get('/favicon.ico')
async def favicon():
root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -172,4 +183,13 @@ async def health_check():
health_status["components"]["google_api"] = f"error: {str(e)}"
health_status["status"] = "unhealthy"
- return health_status
\ No newline at end of file
+ return health_status
+
+# -------------------------------------------------------------------------
+# Router Registration
+# -------------------------------------------------------------------------
+
+app.include_router(auth_router, prefix="/auth", tags=["Auth"])
+app.include_router(user_router, prefix="/user", tags=["User"])
+app.include_router(emails_router, prefix="/emails", tags=["Emails"])
+app.include_router(summaries_router, prefix="/summaries", tags=["Summaries"])
\ No newline at end of file
diff --git a/frontend/src/authentication/authenticate.js b/frontend/src/authentication/authenticate.js
index af9ce45c..598b85b7 100644
--- a/frontend/src/authentication/authenticate.js
+++ b/frontend/src/authentication/authenticate.js
@@ -5,7 +5,13 @@ export const authenticate = async () => {
window.location.href = `${baseUrl}/auth/login?redirect_uri=${redirect_uri}`;
};
-// When Reach loading component call this function
+/**
+ * Handles the OAuth callback after authentication.
+ * Parses the auth state from the URL hash, verifies authentication, and stores the token.
+ * Navigates to the error page if authentication fails.
+ * @async
+ * @returns {PromiseTheme