From 7b6ffee95af0cba2ef5e367a9d6b5dc5783f2166 Mon Sep 17 00:00:00 2001 From: Shayan Shahla <143126171+S-Shahla@users.noreply.github.com> Date: Tue, 17 Jun 2025 09:44:16 -0700 Subject: [PATCH] Revert "merge" --- .github/workflows/docker-image.yml | 17 +- README.md | 12 +- backend/.env.example | 5 +- backend/README.md | 9 +- backend/app/dependencies.py | 130 ---------- backend/app/routers/auth_router.py | 137 +++++++---- backend/app/routers/emails_router.py | 97 ++++---- backend/app/routers/summaries_router.py | 135 ++++++---- backend/app/routers/user_router.py | 232 +++++++++++------- backend/app/services/auth_service.py | 127 +++++----- backend/app/services/database/connection.py | 16 +- .../database/repositories/base_repository.py | 22 +- .../database/repositories/email_repository.py | 12 - .../repositories/summary_repository.py | 40 +-- backend/app/services/email_service.py | 146 +++++++---- .../app/services/summarization/__init__.py | 19 +- backend/app/services/summarization/base.py | 6 +- backend/app/services/summarization/prompts.py | 26 +- .../summarization/providers/google/google.py | 13 +- .../summarization/providers/google/prompts.py | 7 +- .../summarization/providers/openai/openai.py | 13 +- .../summarization/providers/openai/prompts.py | 5 +- .../providers/openrouter/openrouter.py | 184 -------------- .../providers/openrouter/prompts.py | 69 ------ .../services/summarization/summary_service.py | 119 ++++----- backend/app/services/summarization/types.py | 5 +- backend/app/services/user_service.py | 112 +++++---- backend/app/tests/conftest.py | 3 +- backend/app/tests/test_config.py | 1 - .../unit/summary/test_openrouter_provider.py | 92 ------- backend/app/utils/config.py | 53 ++-- backend/app/utils/helpers.py | 146 ----------- backend/main.py | 72 ++---- frontend/src/authentication/authenticate.js | 22 +- .../client/{dashboard => }/client.css | 0 frontend/src/components/client/client.jsx | 20 +- .../components/client/dashboard/dashboard.jsx | 14 +- 
.../components/client/dashboard/miniview.jsx | 43 ---- .../src/components/client/inbox/Email.jsx | 6 - .../components/client/inbox/emailDisplay.jsx | 37 +-- .../src/components/client/inbox/inbox.jsx | 81 ++---- frontend/src/components/client/reducers.jsx | 21 -- .../components/client/settings/settings.css | 59 +---- .../components/client/settings/settings.jsx | 194 ++++----------- frontend/src/components/login/Error.jsx | 3 +- frontend/src/components/login/Home.css | 159 ++---------- frontend/src/components/login/Home.jsx | 1 - frontend/src/components/login/contact.jsx | 27 +- frontend/src/components/login/privacy.jsx | 82 +------ frontend/src/components/login/terms.jsx | 72 +++++- frontend/src/components/router/Router.jsx | 10 - frontend/src/emails/emailHandler.js | 45 +--- frontend/src/main.css | 12 +- frontend/src/main.jsx | 5 +- 54 files changed, 965 insertions(+), 2030 deletions(-) delete mode 100644 backend/app/dependencies.py delete mode 100644 backend/app/services/summarization/providers/openrouter/openrouter.py delete mode 100644 backend/app/services/summarization/providers/openrouter/prompts.py delete mode 100644 backend/app/tests/unit/summary/test_openrouter_provider.py delete mode 100644 backend/app/utils/helpers.py rename frontend/src/components/client/{dashboard => }/client.css (100%) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 923620d1..0ef42e49 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -44,15 +44,14 @@ jobs: which pytest python -m pytest --maxfail=2 env: - EMAIL_ACCOUNT: ${{ secrets.TEST_EMAIL_ACCOUNT }} - GOOGLE_CLIENT_ID: ${{ secrets.TEST_GOOGLE_CLIENT_ID }} - GOOGLE_CLIENT_SECRET: ${{ secrets.TEST_GOOGLE_CLIENT_SECRET }} - MONGO_URI: ${{ secrets.TEST_MONGO_URI }} - OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - OPENROUTER_API_KEY: ${{ secrets.OPENROUTER_API_KEY }} - DEEPSEEK_API_KEY: ${{ secrets.DEEPSEEK_API_KEY }} - GOOGLE_API_KEY: ${{ 
secrets.GEMINI_API_KEY }} - SUMMARIZER_PROVIDER: ${{ secrets.SUMMARIZER_PROVIDER || 'openrouter' }} + TEST_EMAIL_ACCOUNT: ${{ secrets.TEST_EMAIL_ACCOUNT }} + TEST_GOOGLE_CLIENT_ID: ${{ secrets.TEST_GOOGLE_CLIENT_ID }} + TEST_GOOGLE_CLIENT_SECRET: ${{ secrets.TEST_GOOGLE_CLIENT_SECRET }} + TEST_MONGO_URI: ${{ secrets.TEST_MONGO_URI }} + TEST_OPENAI_API_KEY: ${{ secrets.TEST_OPENAI_API_KEY }} + TEST_DEEPSEEK_API_KEY: ${{ secrets.TEST_DEEPSEEK_API_KEY }} + TEST_GOOGLE_API_KEY: ${{ secrets.TEST_GEMINI_API_KEY }} + TEST_SUMMARIZER_PROVIDER: ${{ secrets.TEST_SUMMARIZER_PROVIDER || 'openai' }} # Add a placeholder job that will always run when tests are disabled test-skipped: diff --git a/README.md b/README.md index 8e27d509..9f3a4cfb 100644 --- a/README.md +++ b/README.md @@ -18,28 +18,28 @@ EmailEssence is a sophisticated email management solution that leverages artific ### Feature Complete (FC) Features - 🎨 Customizable dashboard with modular components - 🔍 Advanced keyword analysis and topic identification +- 💻 Cross-platform desktop support via Electron - 🔄 Incremental email fetching for large inboxes - 🎯 Smart email prioritization - 🛠️ Enhanced user preferences and settings -### Future Features -- 💻 Cross-platform desktop support via Electron - ## Technical Stack ### Frontend - React - Modern UI framework -- Vite - Frontend tooling and build server -- JavaScript - Core language for the frontend +- Remix - Full-stack web framework +- Electron - Desktop application framework +- JavaScript - Type-safe development ### Backend - Python - Core backend services - FastAPI - High-performance API framework - MongoDB - Flexible document database - Redis - High-performance caching -- Flexible AI provider support (OpenAI, Google, OpenRouter) +- OpenRouter - AI-powered email processing ### Infrastructure +- Express.js - Web server and middleware - OAuth 2.0 - Secure authentication - IMAP - Email protocol support diff --git a/backend/.env.example b/backend/.env.example index 
2459087c..b1fbbe48 100644 --- a/backend/.env.example +++ b/backend/.env.example @@ -9,10 +9,9 @@ GOOGLE_CLIENT_SECRET=google_client_secret MONGO_URI=mongodb+srv://:
@emailsummarization.1coye.mongodb.net/?retryWrites=true&w=majority&appName=EmailSummarization
# API keys
-OPEN_ROUTER_API_KEY=openrouter_api_key
-#OPENAI_API_KEY=openai_api_key
+OPENAI_API_KEY=openai_api_key
#DEEPSEEK_API_KEY=deepseek_api_key
-#GOOGLE_API_KEY=google_api_key
+GOOGLE_API_KEY=google_api_key
# Summarizer Settings
# SUMMARIZER_PROVIDER=openai
diff --git a/backend/README.md b/backend/README.md
index 175c5d70..2751cd23 100644
--- a/backend/README.md
+++ b/backend/README.md
@@ -23,9 +23,9 @@ For local development without Docker, use one of the provided setup scripts:
#### On Windows:
-```ps1
+```bash
# Run the setup script to create a virtual environment and install dependencies
-.\\setup.ps1
+.\setup.bat
```
These scripts will:
@@ -134,7 +134,7 @@ For Render deployments, environment variables are configured through the Render
- `google_client_secret`
- `email_account`
- `mongo_uri`
- - `openrouter_api_key`
+ - `openai_api_key`
- Any optional variables you wish to override
This separates your development environment configuration from your production deployment, following security best practices.
@@ -152,6 +152,9 @@ For CI/CD environments, use the CI setup scripts:
```bash
# Unix/Linux/macOS
./setup-ci.sh
+
+# Windows
+.\setup-ci.bat
```
## Troubleshooting
diff --git a/backend/app/dependencies.py b/backend/app/dependencies.py
deleted file mode 100644
index f79f2204..00000000
--- a/backend/app/dependencies.py
+++ /dev/null
@@ -1,130 +0,0 @@
-"""
-Common dependencies for Email Essence FastAPI application.
-
-This module centralizes shared dependencies including authentication schemes,
-logging configuration, and common helper functions used across routers and services.
-"""
-
-import logging
-from typing import Dict, Any
-
-from fastapi import Depends, HTTPException, status
-from fastapi.security import OAuth2PasswordBearer
-
-# Internal imports
-from app.models.user_models import UserSchema
-from app.services.auth_service import AuthService
-from app.services.user_service import UserService
-from app.services.database.factories import get_auth_service, get_user_service
-from app.utils.helpers import get_logger, configure_module_logging, standardize_error_response, log_operation
-
-# -------------------------------------------------------------------------
-# Authentication Dependencies
-# -------------------------------------------------------------------------
-
-# Centralized OAuth2 scheme
-oauth2_scheme = OAuth2PasswordBearer(
- tokenUrl="/auth/token",
- description="Enter the token you received from the login flow (without Bearer prefix)"
-)
-
-async def get_current_user_email(
- token: str = Depends(oauth2_scheme),
- auth_service: AuthService = Depends(get_auth_service)
-) -> str:
- """
- Dependency to extract user email from valid token.
-
- Args:
- token: JWT token from OAuth2 authentication
- auth_service: Auth service instance
-
- Returns:
- str: User's email address
-
- Raises:
- HTTPException: 401 error if token is invalid
- """
- try:
- user_data = await auth_service.get_credentials_from_token(token)
- return user_data['email']
- except Exception as e:
- raise HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail="Invalid authentication credentials",
- headers={"WWW-Authenticate": "Bearer"},
- )
-
-async def get_current_user_info(
- token: str = Depends(oauth2_scheme),
- auth_service: AuthService = Depends(get_auth_service)
-) -> Dict[str, Any]:
- """
- Validates token and returns user information.
-
- Args:
- token: JWT token from OAuth2 authentication
- auth_service: Auth service instance
-
- Returns:
- dict: User information and credentials
-
- Raises:
- HTTPException: 401 error if token is invalid
- """
- try:
- user_data = await auth_service.get_credentials_from_token(token)
- return user_data
- except Exception as e:
- raise HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail=f"Invalid authentication: {str(e)}"
- )
-
-async def get_current_user(
- user_data: Dict[str, Any] = Depends(get_current_user_info),
- user_service: UserService = Depends(get_user_service)
-) -> UserSchema:
- """
- Retrieve user details or create user if they don't exist.
-
- Args:
- user_data: User information and credentials from token validation
- user_service: User service instance
-
- Returns:
- UserSchema: User profile information
-
- Raises:
- HTTPException: If user retrieval fails
- """
- try:
- user_info = user_data['user_info']
- user_email = user_info.get('email')
- google_id = user_info.get('google_id')
-
- # Try to get existing user
- user = await user_service.get_user_by_email(user_email)
-
- # If user doesn't exist, create new user
- if not user:
- user = await user_service.create_user({
- "email": user_email,
- "name": user_info.get("name", ""),
- "picture": user_info.get("picture", ""),
- "google_id": google_id
- })
- else:
- # Update google_id if it's missing
- user_dict = user.model_dump()
- if not user_dict.get('google_id'):
- await user_service.update_user(user_dict['_id'], {"google_id": google_id})
-
- return user
- except Exception as e:
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Failed to retrieve user: {str(e)}"
- )
-
-
\ No newline at end of file
diff --git a/backend/app/routers/auth_router.py b/backend/app/routers/auth_router.py
index 9ba23d40..47fac4b9 100644
--- a/backend/app/routers/auth_router.py
+++ b/backend/app/routers/auth_router.py
@@ -6,47 +6,74 @@
via OAuth2 to retrieve and process email data.
"""
-# Standard library imports
-import base64
import json
import urllib.parse
+import base64
import uuid
+from typing import Dict, Optional, Any
+from functools import lru_cache
-# Third-party imports
-from fastapi import APIRouter, Depends, Form, HTTPException, Query, status
-from fastapi.responses import HTMLResponse, RedirectResponse
+from fastapi import APIRouter, HTTPException, status, Depends, Request, Query, Form
+from fastapi.security import OAuth2AuthorizationCodeBearer, OAuth2PasswordBearer
+from fastapi.responses import RedirectResponse, HTMLResponse, JSONResponse
from google.auth.transport.requests import Request as GoogleRequest
from google.oauth2.credentials import Credentials
-from googleapiclient.discovery import build
from starlette.concurrency import run_in_threadpool
+from pydantic import BaseModel, EmailStr
+from google_auth_oauthlib.flow import Flow
+from googleapiclient.discovery import build
-# Internal imports
-from app.dependencies import get_current_user_email, oauth2_scheme
-from app.utils.helpers import get_logger, log_operation
-from app.models import (
- AuthStatusResponse,
- ExchangeCodeRequest,
- RefreshTokenRequest,
- TokenData,
- TokenResponse,
- VerifyTokenRequest,
-)
-from app.services.auth_service import SCOPES, AuthService
-from app.services.database.factories import get_auth_service, get_user_service
+from app.services.auth_service import AuthService, SCOPES
from app.services.user_service import UserService
from app.utils.config import get_settings
-
-# -------------------------------------------------------------------------
-# Router Configuration
-# -------------------------------------------------------------------------
+from app.services.database.factories import get_auth_service, get_user_service
+from app.models import TokenData, TokenResponse, AuthStatusResponse, ExchangeCodeRequest, RefreshTokenRequest, VerifyTokenRequest
router = APIRouter()
settings = get_settings()
-logger = get_logger(__name__, 'router')
-# -------------------------------------------------------------------------
-# Endpoints
-# -------------------------------------------------------------------------
+# -- Authentication Schemes --
+
+# This is a simpler authentication scheme for Swagger UI
+# It only shows a token field without client_id/client_secret
+oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/auth/token", description="Enter the token you received from the login flow (without Bearer prefix)")
+
+# -- Authentication Utility --
+
+async def get_current_user_email(
+ token: str = Depends(oauth2_scheme),
+ auth_service: AuthService = Depends(get_auth_service)
+):
+ """
+ Dependency to extract user email from valid token.
+ Will raise 401 automatically if token is invalid.
+
+ Args:
+ token: JWT token from OAuth2 authentication
+ auth_service: Auth service instance
+
+ Returns:
+ str: User's email address
+
+ Raises:
+ HTTPException: 401 error if token is invalid
+ """
+ try:
+ # Get user info from token
+ user_data = await auth_service.get_credentials_from_token(token)
+ return user_data['email']
+ except Exception as e:
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail="Invalid authentication credentials",
+ headers={"WWW-Authenticate": "Bearer"},
+ )
+
+# -- Endpoints --
+
+# Debugging helper function
+def debug(message: str):
+ print(f"[DEBUG] {message}")
@router.get(
"/login",
@@ -72,7 +99,7 @@ async def login(
Returns:
RedirectResponse: Redirects to Google's authentication page
"""
- log_operation(logger, 'debug', f"Login initiated - Redirect URI: {redirect_uri}")
+ debug(f"Login initiated - Redirect URI: {redirect_uri}")
try:
# Create a state object that includes the frontend redirect URI
@@ -88,13 +115,17 @@ async def login(
result = auth_service.create_authorization_url(encoded_custom_state)
authorization_url = result["authorization_url"]
- log_operation(logger, 'debug', f"Generated Google OAuth URL: {authorization_url}")
+ debug(f"Generated Google OAuth URL: {authorization_url}")
# Now redirect to the correct URL
return RedirectResponse(authorization_url)
except Exception as e:
- raise standardize_error_response(e, "login")
+ debug(f"[ERROR] Login failed: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Failed to create authorization URL: {str(e)}"
+ )
@router.get("/callback")
async def callback(
@@ -115,7 +146,7 @@ async def callback(
Returns:
RedirectResponse: Redirects to frontend with authentication state
"""
- log_operation(logger, 'debug', f"Received callback with code: {code}")
+ debug(f"Received callback with code: {code}")
try:
if not state:
@@ -125,13 +156,13 @@ async def callback(
decoded_state = json.loads(base64.urlsafe_b64decode(state).decode())
frontend_url = decoded_state.get("redirect_uri")
- log_operation(logger, 'debug', f"Decoded state - Redirecting to frontend: {frontend_url}")
+ debug(f"Decoded state - Redirecting to frontend: {frontend_url}")
if not frontend_url:
raise ValueError("Missing redirect URI in state parameter")
# Exchange code for tokens and get user info in one step
- log_operation(logger, 'debug', "Exchanging code for tokens and getting user info...")
+ debug("Exchanging code for tokens and getting user info...")
token_data = await auth_service.get_tokens_from_code(code, None) # First exchange
# Get user info using the token
@@ -152,7 +183,7 @@ async def callback(
)
user_email = user_info.get('email')
- log_operation(logger, 'debug', f"User email retrieved: {user_email}")
+ debug(f"User email retrieved: {user_email}")
if not user_email:
raise ValueError("Could not retrieve user email from Google")
@@ -160,7 +191,7 @@ async def callback(
# Check if user exists, create if not
user = await user_service.get_user_by_email(user_email)
if not user:
- log_operation(logger, 'info', f"Creating new user: {user_email}")
+ debug(f"Creating new user: {user_email}")
user = await user_service.create_user({
"email": user_email,
"name": user_info.get("name", ""),
@@ -168,7 +199,7 @@ async def callback(
"google_id": user_info.get("id")
})
else:
- log_operation(logger, 'info', f"Found existing user: {user_email}")
+ debug(f"Found existing user: {user_email}")
# Special handling for Swagger UI testing
if "localhost:8000/docs" in frontend_url or "/docs" in frontend_url:
@@ -208,7 +239,7 @@ async def exchange_code(
Requires the user's email to associate the tokens.
"""
- log_operation(logger, 'info', f"Exchanging OAuth code for user: {request.user_email}")
+ debug(f"Exchanging OAuth code for user: {request.user_email}")
try:
if not request.code or not request.user_email:
raise HTTPException(
@@ -219,7 +250,7 @@ async def exchange_code(
# Exchange auth code for tokens and store them in MongoDB
tokens = await auth_service.get_tokens_from_code(request.code, request.user_email)
- log_operation(logger, 'debug', f"Token exchange successful for {request.user_email}")
+ debug(f"Token exchange successful for {request.user_email}")
return TokenResponse(
access_token=tokens.token,
token_type="bearer",
@@ -228,8 +259,11 @@ async def exchange_code(
)
except Exception as e:
- log_operation(logger, 'error', f"Code exchange failed: {str(e)}")
- raise standardize_error_response(e, "exchange code")
+ debug(f"[ERROR] Code exchange failed: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail=f"Failed to exchange code for tokens: {str(e)}"
+ )
@router.get("/token", response_model=TokenResponse)
async def get_token(
@@ -249,7 +283,10 @@ async def get_token(
token_type="bearer"
)
except Exception as e:
- raise standardize_error_response(e, "get token")
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail=f"Token retrieval failed: {str(e)}"
+ )
@router.post("/refresh", response_model=TokenResponse)
async def refresh_token(
@@ -276,7 +313,10 @@ async def refresh_token(
token_type="bearer"
)
except Exception as e:
- raise standardize_error_response(e, "refresh token")
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail=f"Token refresh failed: {str(e)}"
+ )
@router.get("/status", response_model=AuthStatusResponse)
async def auth_status(
@@ -291,14 +331,14 @@ async def auth_status(
try:
# Extract user info from the token
user_data = await auth_service.get_credentials_from_token(token)
- user_google_id = user_data['google_id']
+ user_email = user_data['google_id']
- log_operation(logger, 'debug', f"User google_id extracted from token: {user_google_id}")
+ debug(f"User google_id extracted from token: {google_id}")
# Get detailed credentials from the database using that email
try:
# Get the token record directly from the database instead of using get_credentials
- token_record = await auth_service.get_token_record(user_google_id)
+ token_record = await auth_service.get_token_record(google_id)
if not token_record:
return AuthStatusResponse(
@@ -324,7 +364,7 @@ async def auth_status(
except Exception as e:
# Token validation failed
- log_operation(logger, 'error', f"Auth status check failed: {str(e)}")
+ debug(f"[ERROR] Auth status check failed: {str(e)}")
return AuthStatusResponse(
is_authenticated=False,
token_valid=False,
@@ -547,4 +587,7 @@ async def token_endpoint(
"token_type": "bearer"
}
except Exception as e:
- raise standardize_error_response(e, "token endpoint")
\ No newline at end of file
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail=f"Failed to store token: {str(e)}"
+ )
\ No newline at end of file
diff --git a/backend/app/routers/emails_router.py b/backend/app/routers/emails_router.py
index 91b89949..30ffdc36 100644
--- a/backend/app/routers/emails_router.py
+++ b/backend/app/routers/emails_router.py
@@ -6,30 +6,53 @@
It provides a set of REST endpoints for interacting with the user's email data.
"""
-# Standard library imports
-from typing import Optional
+from fastapi import APIRouter, HTTPException, Query, Depends, status
+from fastapi.security import OAuth2PasswordBearer
+from typing import List, Optional
+from pydantic import BaseModel
+import logging
+from functools import lru_cache
-# Third-party imports
-from fastapi import APIRouter, Depends, HTTPException, Query, status
-
-# Internal imports
-from app.dependencies import get_current_user
-from app.utils.helpers import get_logger, log_operation, standardize_error_response
-from app.models.email_models import EmailResponse, EmailSchema, ReaderViewResponse
+from app.models.email_models import EmailSchema, EmailResponse, ReaderViewResponse
from app.models.user_models import UserSchema
-from app.services.database.factories import get_email_service
+from app.routers.user_router import get_current_user
+from app.services.database.factories import get_email_repository, get_email_service
from app.services.email_service import EmailService
-# -------------------------------------------------------------------------
-# Router Configuration
-# -------------------------------------------------------------------------
-
router = APIRouter()
-logger = get_logger(__name__, 'router')
-# -------------------------------------------------------------------------
-# Endpoints
-# -------------------------------------------------------------------------
+# Configure logging with format and level
+logging.basicConfig(
+ level=logging.INFO,
+ format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+ datefmt='%Y-%m-%d %H:%M:%S'
+)
+
+# Add specific configuration for pymongo's logger
+logging.getLogger('pymongo').setLevel(logging.WARNING)
+
+# Create module-specific logger
+logger = logging.getLogger(__name__)
+
+@router.get("/search")
+async def search_emails_by_keyword(
+ keyword: str,
+ email_service: EmailService = Depends(get_email_service),
+ user: UserSchema = Depends(get_current_user)
+):
+ """
+ Search emails using extracted summary keywords.
+
+ Args:
+ keyword: Keyword to search for
+ email_service: Injected email service
+ user: Current user (from token)
+
+ Returns:
+ List of matched emails based on summary keywords
+ """
+ logger.info(f"Search endpoint hit with keyword: {keyword}")
+ return await email_service.search_emails_by_keyword(user.google_id, keyword)
@router.get(
"/",
@@ -88,8 +111,8 @@ async def retrieve_emails(
try:
# Log request parameters
- log_operation(logger, 'debug', f"Email retrieval request with refresh={refresh}", extra={"params": debug_info["request_params"]})
- log_operation(logger, 'debug', f"Google ID for email retrieval: {user.google_id}")
+ logger.debug(f"Email retrieval request with refresh={refresh}", extra={"params": debug_info["request_params"]})
+ logger.debug(f"Google ID for email retrieval: {user.google_id}")
emails, total, service_debug_info = await email_service.fetch_emails(
google_id=user.google_id,
@@ -105,7 +128,7 @@ async def retrieve_emails(
# Combine debug info
debug_info.update(service_debug_info)
- log_operation(logger, 'info', f"Retrieved {len(emails)} emails out of {total} total")
+ logger.info(f"Retrieved {len(emails)} emails out of {total} total")
return EmailResponse(
emails=emails,
@@ -115,7 +138,9 @@ async def retrieve_emails(
)
except Exception as e:
- raise standardize_error_response(e, "retrieve emails")
+ error_msg = f"Failed to retrieve emails: {str(e)}"
+ logger.exception(error_msg) # This logs the full stack trace
+ raise HTTPException(status_code=500, detail=error_msg)
@router.get(
"/{email_id}",
@@ -144,11 +169,7 @@ async def retrieve_email(
"""
email = await email_service.get_email(email_id, user.google_id)
if not email:
- raise standardize_error_response(
- Exception("Email not found"),
- "get email",
- email_id
- )
+ raise HTTPException(status_code=404, detail="Email not found")
return email
@router.put(
@@ -178,11 +199,7 @@ async def mark_email_as_read(
"""
updated_email = await email_service.mark_email_as_read(email_id, user.google_id)
if not updated_email:
- raise standardize_error_response(
- Exception("Email not found"),
- "mark email as read",
- email_id
- )
+ raise HTTPException(status_code=404, detail="Email not found")
return updated_email
@router.delete(
@@ -212,11 +229,7 @@ async def delete_email(
"""
success = await email_service.delete_email(email_id, user.google_id)
if not success:
- raise standardize_error_response(
- Exception("Email not found"),
- "delete email",
- email_id
- )
+ raise HTTPException(status_code=404, detail="Email not found")
return {"message": "Email deleted successfully"}
@router.get(
@@ -250,15 +263,13 @@ async def get_email_reader_view(
reader_content = await email_service.get_email_reader_view(email_id, user.google_id)
if not reader_content:
- raise standardize_error_response(
- Exception("Email not found"),
- "get email reader view",
- email_id
- )
+ raise HTTPException(status_code=404, detail="Email not found")
return reader_content
except Exception as e:
if isinstance(e, HTTPException):
raise e
- raise standardize_error_response(e, "generate reader view", email_id)
\ No newline at end of file
+ error_msg = f"Failed to generate reader view: {str(e)}"
+ logger.exception(error_msg)
+ raise HTTPException(status_code=500, detail=error_msg)
\ No newline at end of file
diff --git a/backend/app/routers/summaries_router.py b/backend/app/routers/summaries_router.py
index 66ea7c67..7b4ac071 100644
--- a/backend/app/routers/summaries_router.py
+++ b/backend/app/routers/summaries_router.py
@@ -6,40 +6,41 @@
strategies to provide concise representations of emails.
"""
-# Standard library imports
import logging
-from typing import List
+from typing import List, Optional, Annotated
+from fastapi import APIRouter, HTTPException, Depends, Query, Path, status
+from contextlib import asynccontextmanager
-# Third-party imports
-from fastapi import APIRouter, Depends, HTTPException, Path, Query, status
-
-# Internal imports
-from app.dependencies import get_current_user
-from app.utils.helpers import get_logger, log_operation, standardize_error_response
+from app.utils.config import Settings, get_settings, SummarizerProvider
from app.models import EmailSchema, SummarySchema, UserSchema
-from app.services import SummaryService
-from app.services.database.factories import get_email_service, get_summary_service
+from app.services import EmailService, SummaryService
+from app.services.summarization import get_summarizer
+from app.services.summarization.base import AdaptiveSummarizer
from app.services.summarization import (
- GeminiEmailSummarizer,
- OpenAIEmailSummarizer,
- ProcessingStrategy,
- get_summarizer,
+ ProcessingStrategy,
+ OpenAIEmailSummarizer,
+ GeminiEmailSummarizer
+)
+from app.routers.user_router import get_current_user
+from app.services.database.factories import (
+ get_summary_service,
+ get_email_service
)
-from app.services.summarization.base import AdaptiveSummarizer
-
-# -------------------------------------------------------------------------
-# Router Configuration
-# -------------------------------------------------------------------------
-router = APIRouter()
+# Configure logging with format and level
+logging.basicConfig(
+ level=logging.INFO,
+ format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+ datefmt='%Y-%m-%d %H:%M:%S'
+)
# Add specific configuration for pymongo's logger
logging.getLogger('pymongo').setLevel(logging.WARNING)
-logger = get_logger(__name__, 'router')
-# -------------------------------------------------------------------------
-# Endpoints
-# -------------------------------------------------------------------------
+# Create module-specific logger
+logger = logging.getLogger(__name__)
+
+router = APIRouter()
@router.get(
"/batch",
@@ -86,17 +87,15 @@ async def get_summaries_by_ids(
if not result['summaries']:
if result['missing_emails'] and not result['failed_summaries']:
# Only missing emails, no generation failures
- raise standardize_error_response(
- Exception("Emails not found"),
- "get summaries by ids",
- result['missing_emails']
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Emails not found: {result['missing_emails']}"
)
elif result['failed_summaries'] and not result['missing_emails']:
# Only generation failures, no missing emails
- raise standardize_error_response(
- Exception("Failed to generate summaries"),
- "get summaries by ids",
- result['failed_summaries']
+ raise HTTPException(
+ status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+ detail=f"Failed to generate summaries for emails: {result['failed_summaries']}"
)
else:
# Both missing emails and generation failures
@@ -104,15 +103,14 @@ async def get_summaries_by_ids(
"missing_emails": result['missing_emails'],
"failed_summaries": result['failed_summaries']
}
- raise standardize_error_response(
- Exception("No summaries could be generated"),
- "get summaries by ids",
- error_details
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"No summaries could be generated: {error_details}"
)
# If we have some successful summaries but also some failures, log a warning
if result['missing_emails'] or result['failed_summaries']:
- log_operation(logger, 'warning', f"Partial success for user {user.google_id}: "
+ logger.warning(
f"Partial success for user {user.google_id}: "
f"{len(result['summaries'])} successful, "
f"{len(result['missing_emails'])} missing, "
@@ -125,7 +123,11 @@ async def get_summaries_by_ids(
# Re-raise HTTP exceptions as-is
raise
except Exception as e:
- raise standardize_error_response(e, "retrieve/generate summaries by IDs")
+ logger.error(f"Error retrieving/generating summaries by IDs: {str(e)}", exc_info=True)
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Failed to retrieve email summaries: {str(e)}"
+ )
@router.get(
"/",
@@ -240,7 +242,12 @@ async def get_summaries(
)
except Exception as e:
- raise standardize_error_response(e, "process email summaries")
+ # Log the full error for debugging
+ logger.error(f"Error processing summaries: {str(e)}", exc_info=True)
+ raise HTTPException(
+ status_code=500,
+ detail="Failed to process email summaries"
+ )
@router.get(
"/recent/{days}",
@@ -267,7 +274,7 @@ async def get_recent_summaries(
"""
try:
# Log request parameters
- log_operation(logger, 'debug', f"Getting recent summaries for user {user.email} - days: {days}, limit: {limit}")
+ logger.debug(f"Getting recent summaries for user {user.email} - days: {days}, limit: {limit}")
# Get summaries from service
summaries = await summary_service.get_recent_summaries(
@@ -276,10 +283,22 @@ async def get_recent_summaries(
google_id=user.google_id
)
- log_operation(logger, 'debug', f"Retrieved {len(summaries)} summaries for user {user.email}")
+ logger.debug(f"Retrieved {len(summaries)} summaries for user {user.email}")
return summaries
except Exception as e:
- raise standardize_error_response(e, "retrieve recent summaries")
+ logger.error(
+ f"Error retrieving recent summaries for user {user.email}: {str(e)}",
+ exc_info=True,
+ extra={
+ "user_email": user.email,
+ "days": days,
+ "limit": limit
+ }
+ )
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail="Failed to retrieve recent summaries"
+ )
@router.get(
"/keyword/{keyword}",
@@ -312,7 +331,11 @@ async def search_by_keyword(
results = await summary_service.search_by_keywords([keyword], limit=limit, google_id=user.google_id)
return results
except Exception as e:
- raise standardize_error_response(e, "search summaries by keyword")
+ logging.error(f"Error searching summaries by keyword: {str(e)}", exc_info=True)
+ raise HTTPException(
+ status_code=500,
+ detail="Failed to search summaries"
+ )
@router.get("/{email_id}", response_model=SummarySchema)
async def get_summary_by_id(
@@ -337,16 +360,19 @@ async def get_summary_by_id(
# Get summary from repository
summary = await summary_service.get_or_create_summary(email_id, summarizer, user.google_id)
if not summary:
- raise standardize_error_response(
- Exception("Summary not found"),
- "get summary",
- email_id
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Summary not found for email {email_id}"
)
return SummarySchema(**summary)
except Exception as e:
- raise standardize_error_response(e, "retrieve/generate summary", email_id)
+ logger.error(f"Error retrieving/generating summary: {e}", exc_info=True)
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=str(e)
+ )
@router.post(
"/summarize",
@@ -398,7 +424,11 @@ async def summarize_single_email(
return summary
except Exception as e:
- raise standardize_error_response(e, "generate email summary")
+ logging.error(f"Error summarizing email: {str(e)}", exc_info=True)
+ raise HTTPException(
+ status_code=500,
+ detail="Failed to generate email summary"
+ )
@router.delete(
"/{email_id}",
@@ -427,9 +457,8 @@ async def delete_summary(
"""
deleted = await summary_service.delete_summary(email_id, user.google_id)
if not deleted:
- raise standardize_error_response(
- Exception("Summary not found"),
- "delete summary",
- email_id
+ raise HTTPException(
+ status_code=404,
+ detail=f"Summary for email {email_id} not found"
)
return {"message": f"Summary for email {email_id} deleted"}
diff --git a/backend/app/routers/user_router.py b/backend/app/routers/user_router.py
index 2c9133ec..9363b8c3 100644
--- a/backend/app/routers/user_router.py
+++ b/backend/app/routers/user_router.py
@@ -5,29 +5,61 @@
It provides endpoints for retrieving and updating user information and preferences.
"""
-# Third-party imports
-from fastapi import APIRouter, Depends, HTTPException, status
+from fastapi import APIRouter, HTTPException, status, Depends
+from fastapi.security import OAuth2PasswordBearer
+from fastapi.responses import JSONResponse
+from functools import lru_cache
+from typing import Optional, Dict, Any
-# Internal imports
-from app.dependencies import get_current_user, get_current_user_info
-from app.utils.helpers import get_logger, log_operation, standardize_error_response
-from app.models import PreferencesSchema, UserSchema
+from google.auth.transport.requests import Request as GoogleRequest
+from google.oauth2.credentials import Credentials
+from googleapiclient.discovery import build
+from starlette.concurrency import run_in_threadpool
+
+from app.models import UserSchema, PreferencesSchema
from app.services.auth_service import AuthService
from app.services.user_service import UserService
-from app.services.email_service import EmailService
-from app.services.summarization.summary_service import SummaryService
-from app.services.database.factories import get_auth_service, get_user_service, get_email_service, get_summary_service
-
-# -------------------------------------------------------------------------
-# Router Configuration
-# -------------------------------------------------------------------------
+from app.services.database.factories import get_user_service, get_auth_service
router = APIRouter()
-logger = get_logger(__name__, 'router')
-# -------------------------------------------------------------------------
-# Endpoints
-# -------------------------------------------------------------------------
+# OAuth authentication scheme
+oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/auth/token", description="Enter the token you received from the login flow (without Bearer prefix)")
+
+# Debugging helper function
+def debug(message: str):
+ """Print debug messages with a consistent format"""
+ print(f"[DEBUG] {message}")
+
+async def get_current_user_info(
+ token: str = Depends(oauth2_scheme),
+ auth_service: AuthService = Depends(get_auth_service)
+):
+ """
+ Validates token and returns user information.
+
+ Args:
+ token: JWT token from OAuth2 authentication
+ auth_service: Auth service instance
+
+ Returns:
+ dict: User information and credentials
+
+ Raises:
+ HTTPException: 401 error if token is invalid
+ """
+ debug(f"Validating token for user authentication...")
+
+ try:
+ user_data = await auth_service.get_credentials_from_token(token)
+ debug(f"User authenticated successfully: {user_data.get('user_info', {}).get('email', 'Unknown')}")
+ return user_data
+ except Exception as e:
+ debug(f"[ERROR] Authentication failed: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail=f"Invalid authentication: {str(e)}"
+ )
@router.get(
"/me",
@@ -35,20 +67,61 @@
summary="Get current user profile",
description="Retrieves the authenticated user's profile information or creates a new user record if one doesn't exist"
)
-async def get_current_user_profile(
- user: UserSchema = Depends(get_current_user)
+async def get_current_user(
+ user_data: dict = Depends(get_current_user_info),
+ user_service: UserService = Depends(get_user_service)
):
"""
- Retrieve current user profile.
+ Retrieve user details or create user if they don't exist.
Args:
- user: Current authenticated user from dependency
+ user_data: User information and credentials from token validation
+ user_service: User service instance
Returns:
UserSchema: User profile information
+
+ Raises:
+ HTTPException: If user retrieval fails
"""
- logger.debug(f"User profile retrieved: {user.email}")
- return user
+ debug("Retrieving current user...")
+
+ try:
+ user_info = user_data['user_info']
+ user_email = user_info.get('email')
+ google_id = user_info.get('google_id')
+
+ debug(f"Fetching user from database or creating new: {user_email}")
+
+ # Try to get existing user
+ user = await user_service.get_user_by_email(user_email)
+
+ # If user doesn't exist, create new user
+ if not user:
+ debug(f"Creating new user: {user_email}")
+ user = await user_service.create_user({
+ "email": user_email,
+ "name": user_info.get("name", ""),
+ "picture": user_info.get("picture", ""),
+ "google_id": google_id
+ })
+ else:
+ debug(f"Found existing user: {user_email}")
+ # Convert UserSchema to dict for checking google_id
+ user_dict = user.dict()
+ # Update google_id if it's missing
+ if not user_dict.get('google_id'):
+ debug(f"Updating missing google_id for user: {user_email}")
+ await user_service.update_user(user_dict['_id'], {"google_id": google_id})
+
+ debug(f"User retrieval successful: {user_email}")
+ return user
+ except Exception as e:
+ debug(f"[ERROR] Failed to retrieve user: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Failed to retrieve user: {str(e)}"
+ )
@router.get(
"/preferences",
@@ -72,28 +145,34 @@ async def get_user_preferences(
Raises:
HTTPException: If preferences cannot be retrieved
"""
- logger.debug("Retrieving user preferences...")
+ debug("Retrieving user preferences...")
try:
user_info = user_data['user_info']
user_email = user_info.get('email')
- logger.debug(f"Fetching preferences for user: {user_email}")
+ debug(f"Fetching preferences for user: {user_email}")
# Get user first to ensure they exist
user = await user_service.get_user_by_email(user_email)
if not user:
- logger.debug(f"User not found: {user_email}")
+ debug(f"User not found: {user_email}")
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="User not found"
)
- preferences = user.preferences.model_dump()
- logger.debug(f"Preferences retrieved successfully for user: {user_email}")
+ preferences = user.preferences.dict()
+ debug(f"Preferences retrieved successfully for user: {user_email}")
return {"preferences": preferences}
+ except HTTPException:
+ raise
except Exception as e:
- raise standardize_error_response(e, "retrieve user preferences")
+ debug(f"[ERROR] Failed to retrieve user preferences: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Failed to retrieve user preferences: {str(e)}"
+ )
@router.put(
"/preferences",
@@ -119,36 +198,36 @@ async def update_preferences(
Raises:
HTTPException: If preference update fails
"""
- logger.debug("Updating user preferences...")
+ debug("Updating user preferences...")
try:
user_info = user_data['user_info']
user_email = user_info.get('email')
- logger.debug(f"Updating preferences for user: {user_email}")
+ debug(f"Updating preferences for user: {user_email}")
# Get user first to ensure they exist
user = await user_service.get_user_by_email(user_email)
if not user:
- logger.debug(f"User not found: {user_email}")
+ debug(f"User not found: {user_email}")
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="User not found"
)
- logger.debug(f"Found user with ID: {user.google_id}")
- logger.debug(f"Current user data: {user.model_dump()}")
+ debug(f"Found user with ID: {user.google_id}")
+ debug(f"Current user data: {user.dict()}")
# Create update data with existing user fields and new preferences
update_data = {
"google_id": user.google_id,
"email": user.email,
"name": user.name,
- "oauth": user.oauth.model_dump() if hasattr(user, 'oauth') and user.oauth else {},
- "preferences": preferences.model_dump()
+ "oauth": user.oauth.dict() if hasattr(user, 'oauth') else {},
+ "preferences": preferences.dict()
}
- logger.debug(f"Update data: {update_data}")
+ debug(f"Update data: {update_data}")
# Update user with new preferences
try:
@@ -157,20 +236,30 @@ async def update_preferences(
update_data
)
except Exception as e:
- raise standardize_error_response(e, "update preferences")
+ debug(f"Error updating user: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Failed to update user: {str(e)}"
+ )
if not updated_user:
- logger.debug("Update returned None")
+ debug("Update returned None")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to update preferences"
)
- logger.debug(f"Updated user data: {updated_user.model_dump()}")
- logger.debug(f"Preferences updated successfully for user: {user_email}")
- return {"preferences": updated_user.preferences.model_dump()}
+ debug(f"Updated user data: {updated_user.dict()}")
+ debug(f"Preferences updated successfully for user: {user_email}")
+ return {"preferences": updated_user.preferences.dict()}
+ except HTTPException:
+ raise
except Exception as e:
- raise standardize_error_response(e, "update preferences")
+ debug(f"[ERROR] Failed to update preferences: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Failed to update preferences: {str(e)}"
+ )
@router.get("/{user_id}", response_model=UserSchema)
async def get_user(
@@ -194,10 +283,9 @@ async def get_user(
"""
user = await user_service.get_user(user_id)
if not user:
- raise standardize_error_response(
- Exception("User not found"),
- "get user",
- user_id
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="User not found"
)
return user
@@ -221,10 +309,9 @@ async def get_user_by_email(
"""
user = await user_service.get_user_by_email(email)
if not user:
- raise standardize_error_response(
- Exception("User not found"),
- "get user by email",
- email
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="User not found"
)
return user
@@ -269,10 +356,9 @@ async def update_user(
"""
user = await user_service.update_user(user_id, user_data)
if not user:
- raise standardize_error_response(
- Exception("User not found"),
- "update user",
- user_id
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="User not found"
)
return user
@@ -280,9 +366,7 @@ async def update_user(
async def delete_user(
user_id: str,
user_service: UserService = Depends(get_user_service),
- auth_service: AuthService = Depends(get_auth_service),
- email_service: EmailService = Depends(get_email_service),
- summary_service: SummaryService = Depends(get_summary_service)
+ auth_service: AuthService = Depends(get_auth_service)
) -> dict:
"""
Delete a user.
@@ -291,8 +375,6 @@ async def delete_user(
user_id: The ID of the user to delete
user_service: Injected UserService instance
auth_service: Injected AuthService instance
- email_service: Injected EmailService instance
- summary_service: Injected SummaryService instance
Returns:
dict: Success message
@@ -300,31 +382,11 @@ async def delete_user(
Raises:
HTTPException: 404 if user not found
"""
- successDeleteUser = await user_service.delete_user(user_id)
- if not successDeleteUser:
- raise standardize_error_response(
- HTTPException(status_code=404, detail="User not found"),
- action="delete user",
- context=user_id
+ success = await user_service.delete_user(user_id)
+ if not success:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="User not found"
)
-
- successDeleteEmails = await email_service.delete_emails(user_id)
- if not successDeleteEmails:
- raise standardize_error_response(
- HTTPException(status_code=500, detail="Failed to delete user emails"),
- action="delete emails by user ID",
- context=user_id
- )
-
- # Note: This is commented out to avoid having to inccur the cost of deleting and resummarizing
-
- #successDeleteSummaries = await summary_service.delete_summaries_by_google_id(user_id)
- #if not successDeleteSummaries:
- # raise standardize_error_response(
- # HTTPException(status_code=404, detail="Summaries not found"),
- # action="delete summaries by user ID",
- # context=user_id
- #)
-
return {"message": "User deleted successfully"}
\ No newline at end of file
diff --git a/backend/app/services/auth_service.py b/backend/app/services/auth_service.py
index b7fe871d..622da344 100644
--- a/backend/app/services/auth_service.py
+++ b/backend/app/services/auth_service.py
@@ -5,36 +5,32 @@
and user authentication with Google.
"""
-# Standard library imports
+import logging
import os
+from typing import Optional, Dict, Any
from datetime import datetime, timedelta
-from typing import Any, Dict, Optional
-
-# Third-party imports
from fastapi import HTTPException, status
-from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import Flow
+from google.auth.transport.requests import Request
from googleapiclient.discovery import build
from starlette.concurrency import run_in_threadpool
-# Internal imports
-from app.utils.helpers import get_logger, log_operation, standardize_error_response
-from app.models import AuthState, TokenData, UserSchema
-from app.services.database import (
- TokenRepository,
- UserRepository,
- get_token_repository,
- get_user_repository,
-)
+# Import from app modules
+from app.models import TokenData, AuthState
+from app.services.database import TokenRepository, UserRepository, get_token_repository, get_user_repository
from app.services.user_service import UserService
from app.utils.config import Settings, get_settings
-# -------------------------------------------------------------------------
-# Configuration
-# -------------------------------------------------------------------------
+# Configure logging
+logging.basicConfig(
+ level=logging.DEBUG,
+ format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+ datefmt='%Y-%m-%d %H:%M:%S'
+)
+
+logger = logging.getLogger(__name__)
-logger = get_logger(__name__, 'service')
settings = get_settings()
SCOPES = [
@@ -90,55 +86,56 @@ async def verify_user_access(
Raises:
HTTPException: 403 if access is denied
"""
- log_operation(logger, 'debug', f"Verifying user access for user ID: {user_id}")
+ logger.debug(f"Verifying user access for user ID: {user_id}")
try:
# Get the current user's email from the token data
token_record = await self.get_token_record(current_user_data['google_id'])
if not token_record:
- log_operation(logger, 'warning', f"No token record found for user: {current_user_data['google_id']}")
- raise standardize_error_response(
- Exception("No valid token record found"),
- "verify user access",
- current_user_data['google_id']
+ logger.warning(f"No token record found for user: {current_user_data['google_id']}")
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail="No valid token record found"
)
current_user = await user_service.get_user(current_user_data['user_info']['id'])
if not current_user:
- raise standardize_error_response(
- Exception("Current user not found"),
- "verify user access",
- current_user_data['google_id']
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="Current user not found"
)
# Allow access if:
# 1. User is accessing their own data
# 2. User has admin privileges
if current_user['google_id'] == user_id or current_user.get('is_admin', False):
- log_operation(logger, 'debug', f"Access granted for user ID: {user_id}")
+ logger.debug(f"Access granted for user ID: {user_id}")
return True
- log_operation(logger, 'debug', f"Access denied for user ID: {user_id}")
- raise standardize_error_response(
- Exception("You do not have permission to access this resource"),
- "verify user access",
- user_id
+ logger.debug(f"Access denied for user ID: {user_id}")
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="You do not have permission to access this resource"
)
+ except HTTPException:
+ raise
except Exception as e:
- raise standardize_error_response(e, "verify user access", user_id)
+ logger.error(f"Access verification failed: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Failed to verify access: {str(e)}"
+ )
def create_authorization_url(self, custom_state=None) -> Dict[str, str]:
"""Generates Google OAuth2 authorization URL."""
- log_operation(logger, 'debug', "Generating Google OAuth2 authorization URL...")
+ logger.debug("Generating Google OAuth2 authorization URL...")
client_id = settings.google_client_id
client_secret = settings.google_client_secret
if not client_id or not client_secret:
- raise standardize_error_response(
- Exception("Google API credentials missing"),
- "create authorization URL"
- )
+ logger.error("Google API credentials missing.")
+ raise HTTPException(status_code=500, detail="Google API credentials not found in settings.")
client_config = {
"web": {
@@ -153,7 +150,7 @@ def create_authorization_url(self, custom_state=None) -> Dict[str, str]:
flow = Flow.from_client_config(client_config, SCOPES)
flow.redirect_uri = self.get_redirect_uri()
- log_operation(logger, 'debug', f"Using redirect URI: {flow.redirect_uri}")
+ logger.debug(f"Using redirect URI: {flow.redirect_uri}")
if custom_state:
authorization_url, _ = flow.authorization_url(
@@ -239,7 +236,11 @@ async def get_tokens_from_code(self, code: str, email: str) -> TokenData:
return token
except Exception as e:
- raise standardize_error_response(e, "get tokens from code", email)
+ logger.error(f"Failed to get tokens for user {email}: {e}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail="Failed to get tokens"
+ )
async def get_current_user(self, email: str) -> Optional[Dict[str, Any]]:
"""
@@ -263,7 +264,11 @@ async def get_current_user(self, email: str) -> Optional[Dict[str, Any]]:
))
return user.model_dump()
except Exception as e:
- raise standardize_error_response(e, "get current user", email)
+ logger.error(f"Failed to get current user: {e}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail="Failed to get current user"
+ )
async def get_token_data(self, google_id: str) -> Optional[TokenData]:
"""
@@ -278,14 +283,18 @@ async def get_token_data(self, google_id: str) -> Optional[TokenData]:
try:
return await self.token_repository.find_by_google_id(google_id)
except Exception as e:
- raise standardize_error_response(e, "get token data", google_id)
+ logger.error(f"Failed to get token record for google_id {google_id}: {e}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail="Failed to get token record"
+ )
def get_redirect_uri(self):
"""Returns the OAuth redirect URI."""
- log_operation(logger, 'debug', "Retrieving redirect URI...")
+ logger.debug("Retrieving redirect URI...")
if callback_url := settings.oauth_callback_url:
- log_operation(logger, 'debug', f"Using env-specified callback URL: {callback_url}")
+ logger.debug(f"Using env-specified callback URL: {callback_url}")
return callback_url
environment = settings.environment
@@ -302,7 +311,7 @@ async def get_credentials_from_token(self, token: str):
Validates a token and returns user information from Google.
Used for authenticating API requests.
"""
- log_operation(logger, 'debug', "Validating access token and retrieving user info...")
+ logger.debug("Validating access token and retrieving user info...")
try:
# First try to validate the token directly
@@ -322,13 +331,13 @@ async def get_credentials_from_token(self, token: str):
service.userinfo().get().execute()
)
except Exception as e:
- log_operation(logger, 'debug', f"Initial token validation failed, attempting refresh: {e}")
+ logger.debug(f"Initial token validation failed, attempting refresh: {e}")
# If token validation fails, try to get a new token using refresh token
token_record = await self.token_repository.find_by_token(token)
if not token_record or not token_record.refresh_token:
- raise standardize_error_response(
- Exception("Invalid or expired token"),
- "get credentials from token"
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail="Invalid or expired token"
)
# Create credentials with refresh token
@@ -360,13 +369,13 @@ async def get_credentials_from_token(self, token: str):
)
if not user_info or not user_info.get('email'):
- log_operation(logger, 'error', "Unable to retrieve user email from token.")
+ logger.error("Unable to retrieve user email from token.")
raise ValueError("Unable to retrieve user email from token")
# Add google_id to user_info
user_info['google_id'] = user_info.get('id')
- log_operation(logger, 'info', f"User info retrieved for: {user_info.get('email')}")
+ logger.info(f"User info retrieved for: {user_info.get('email')}")
return {
'user_info': user_info,
@@ -375,7 +384,8 @@ async def get_credentials_from_token(self, token: str):
}
except Exception as e:
- raise standardize_error_response(e, "get credentials from token")
+ logger.exception("Token validation failed.")
+ raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=f"Invalid token: {str(e)}")
async def get_token_record(self, google_id: str) -> Optional[Dict[str, Any]]:
"""
@@ -388,15 +398,16 @@ async def get_token_record(self, google_id: str) -> Optional[Dict[str, Any]]:
Optional[Dict[str, Any]]: Complete token record if found, None otherwise
"""
try:
- log_operation(logger, 'debug', f"Getting token record for google_id: {google_id}")
+ logger.debug(f"Getting token record for google_id: {google_id}")
token_data = await self.token_repository.find_by_google_id(google_id)
if not token_data:
- log_operation(logger, 'warning', f"No token record found for google_id: {google_id}")
+ logger.warning(f"No token record found for google_id: {google_id}")
return None
- log_operation(logger, 'info', f"Found token record for google_id: {google_id}")
+ logger.info(f"Found token record for google_id: {google_id}")
# Convert TokenData to dict if it's a model instance
if hasattr(token_data, 'model_dump'):
return token_data.model_dump()
return token_data
except Exception as e:
- raise standardize_error_response(e, "get token record", google_id)
+ logger.error(f"Failed to get token record for google_id {google_id}: {e}")
+ return None
diff --git a/backend/app/services/database/connection.py b/backend/app/services/database/connection.py
index 7d65a16d..6b3284bd 100644
--- a/backend/app/services/database/connection.py
+++ b/backend/app/services/database/connection.py
@@ -33,17 +33,15 @@ async def initialize(self):
if self._client is None:
try:
settings = get_settings()
-
- # Define valid connection options
+
+ # Configure connection options
connection_options = {
- #"serverSelectionTimeoutMS": 5000, # 5 second timeout
- #"connectTimeoutMS": 10000, # 10 second connection timeout
- "retryWrites": True, # Enable retryable writes on bad connections
- "retryReads": True, # Enable retryable reads on bad connections
- "maxPoolSize": 100, # Maximum number of connections in the pool
- "minPoolSize": 2, # Minimum number of connections in the pool
+ "serverSelectionTimeoutMS": 5000, # 5 second timeout
+ "connectTimeoutMS": 10000, # 10 second connection timeout
+ "retryWrites": True, # Enable retryable writes
+ "retryReads": True, # Enable retryable reads
}
-
+
self._client = AsyncIOMotorClient(
settings.mongo_uri,
**connection_options
diff --git a/backend/app/services/database/repositories/base_repository.py b/backend/app/services/database/repositories/base_repository.py
index bb1fd2e9..a9a6a867 100644
--- a/backend/app/services/database/repositories/base_repository.py
+++ b/backend/app/services/database/repositories/base_repository.py
@@ -131,7 +131,8 @@ async def find_many(
query: Dict[str, Any],
limit: int = 100,
skip: int = 0,
- sort: List[tuple] = None
+ sort: List[tuple] = None,
+ projection: Optional[Dict[str, int]] = None
) -> List[T]:
"""
Find multiple documents matching the query with pagination support.
@@ -141,12 +142,13 @@ async def find_many(
limit: Maximum number of documents to return
skip: Number of documents to skip
sort: List of (field, direction) tuples for sorting
+ projection: Dictionary specifying fields to include/exclude (e.g., {"email_id": 1})
Returns:
List[T]: List of matching documents
"""
try:
- cursor = self._get_collection().find(query)
+ cursor = self._get_collection().find(query, projection) if projection else self._get_collection().find(query)
if sort:
cursor = cursor.sort(sort)
@@ -270,22 +272,6 @@ async def delete_one(self, query: Dict[str, Any]) -> bool:
return result.deleted_count > 0
except Exception as e:
raise
-
- async def delete_many(self, query: Dict[str, Any]) -> bool:
- """
- Delete multiple documents matching the query.
-
- Args:
- query: MongoDB query filter
-
- Returns:
- bool: True if deletion successful
- """
- try:
- result = await self._get_collection().delete_many(query)
- return result.deleted_count > 0
- except Exception as e:
- raise
async def count_documents(self, query: Dict[str, Any]) -> int:
"""
diff --git a/backend/app/services/database/repositories/email_repository.py b/backend/app/services/database/repositories/email_repository.py
index 1275dee7..0bec7afa 100644
--- a/backend/app/services/database/repositories/email_repository.py
+++ b/backend/app/services/database/repositories/email_repository.py
@@ -82,18 +82,6 @@ async def update_by_email_and_google_id(
update_data
)
- async def delete_by_google_id(self, google_id: str) -> bool:
- """
- Delete all emails by Google user ID.
-
- Args:
- google_id: Google ID of the user
-
- Returns:
- bool: True if deletion successful
- """
- return await self.delete_many({"google_id": google_id})
-
async def delete_by_email_and_google_id(self, email_id: str, google_id: str) -> bool:
"""
Delete an email by IMAP UID and Google user ID.
diff --git a/backend/app/services/database/repositories/summary_repository.py b/backend/app/services/database/repositories/summary_repository.py
index cfe5d05f..c3597497 100644
--- a/backend/app/services/database/repositories/summary_repository.py
+++ b/backend/app/services/database/repositories/summary_repository.py
@@ -59,6 +59,28 @@ async def find_by_google_id(self, google_id: str) -> List[SummarySchema]:
"""
return await self.find_many({"google_id": google_id})
+ async def find_email_ids_by_keyword(self, google_id: str, keyword: str) -> List[str]:
+ """
+ Search for emails using summary keywords.
+
+ Args:
+ google_id: Google ID of the user.
+ keyword: Keyword to search in the summary keywords.
+ Note: Results are capped at 100 matching documents.
+
+ Returns:
+ List[str]: Email IDs whose summaries match the keyword.
+ """
+
+ query = {
+ "google_id": google_id,
+ "keywords": {"$regex": keyword, "$options": "i"}
+ }
+ raw_results = await self._get_collection().find(query, {"email_id": 1}).to_list(length=100)
+
+ return [doc["email_id"] for doc in raw_results if "email_id" in doc]
+
+
async def update_by_email_id(
self,
email_id: str,
@@ -94,26 +116,14 @@ async def delete_by_email_and_google_id(self, email_id: str, google_id: str) ->
bool: True if deletion successful
"""
return await self.delete_one({"email_id": email_id, "google_id": google_id})
-
- async def delete_by_google_id(self, google_id):
- """
- Delete all summaries attached to given Google user ID.
-
- Args:
- google_id: Google ID of the user
-
- Returns:
- bool: True if deletion successful
- """
- return await self.delete_many({"google_id": google_id})
-
async def find_many(
self,
query: Dict[str, Any],
limit: int = 100,
skip: int = 0,
- sort: List[tuple] = None
+ sort: List[tuple] = None,
+ projection: Optional[Dict[str, int]] = None
) -> List[SummarySchema]:
"""
Find multiple summaries matching the query.
@@ -134,4 +144,4 @@ async def find_many(
if isinstance(value, datetime):
query["generated_at"][op] = value
- return await super().find_many(query, limit, skip, sort)
\ No newline at end of file
+ return await super().find_many(query, limit, skip, sort, projection=projection)
\ No newline at end of file
diff --git a/backend/app/services/email_service.py b/backend/app/services/email_service.py
index 8d14d62e..f46d994a 100644
--- a/backend/app/services/email_service.py
+++ b/backend/app/services/email_service.py
@@ -2,34 +2,32 @@
Email service for handling email-related operations.
"""
-# Standard library imports
-import email
+import logging
import os
+import email
+from typing import List, Optional, Dict, Any, Tuple, Union
import re
-from datetime import datetime
from email.header import decode_header
-from typing import Any, Dict, List, Optional, Tuple, Union
-
-# Third-party imports
-from fastapi import HTTPException, status
-from google.auth.transport.requests import Request
from imapclient import IMAPClient
+from datetime import datetime
+from google.auth.transport.requests import Request
+from fastapi import HTTPException, status
from starlette.concurrency import run_in_threadpool
-# Internal imports
-from app.utils.helpers import get_logger, log_operation, standardize_error_response
+# Import from app modules
from app.models import EmailSchema, ReaderViewResponse
+from app.services.database import EmailRepository, SummaryRepository, get_email_repository, get_summary_repository
+from app.services.database.factories import get_user_service, get_auth_service
from app.services import auth_service
-from app.services.database import EmailRepository, get_email_repository
-from app.services.database.factories import get_auth_service, get_user_service
-from app.utils.config import get_settings
-# -------------------------------------------------------------------------
-# Configuration
-# -------------------------------------------------------------------------
+# Configure logging
+logging.basicConfig(
+ level=logging.DEBUG,
+ format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+ datefmt='%Y-%m-%d %H:%M:%S'
+)
-logger = get_logger(__name__, 'service')
-settings = get_settings()
+logger = logging.getLogger(__name__)
class EmailService:
"""
@@ -39,14 +37,16 @@ class EmailService:
processing, and storage operations.
"""
- def __init__(self, email_repository: EmailRepository = None):
+ def __init__(self, email_repository: EmailRepository = None, summary_repository: SummaryRepository = None):
"""
Initialize the email service.
Args:
email_repository: Email repository instance
+ summary_repository: Summary repository instance
"""
self.email_repository = email_repository or get_email_repository()
+ self.summary_repository = summary_repository or get_summary_repository()
self.imap_host = 'imap.gmail.com'
self.default_email_account = os.environ.get("EMAIL_ACCOUNT")
@@ -62,7 +62,16 @@ def _ensure_email_schema(self, email_data: Union[dict, EmailSchema]) -> EmailSch
def _handle_email_error(self, error: Exception, operation: str, email_id: str = None, google_id: str = None) -> None:
"""Standardize error handling for email operations."""
- raise standardize_error_response(error, operation, email_id, google_id)
+ error_msg = f"Failed to {operation}"
+ if email_id:
+ error_msg += f" email {email_id}"
+ if google_id:
+ error_msg += f" for user {google_id}"
+ logger.exception(f"{error_msg}: {str(error)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=error_msg
+ )
def _get_imap_connection(self, token: str, email_account: str) -> IMAPClient:
"""Create and authenticate IMAP connection."""
@@ -71,7 +80,15 @@ def _get_imap_connection(self, token: str, email_account: str) -> IMAPClient:
server.oauth2_login(email_account, token)
return server
except Exception as e:
- raise standardize_error_response(e, "get imap connection", email_account)
+ logger.error(f"IMAP Authentication Error: {e}")
+ if hasattr(e, 'args') and e.args:
+ logger.error(f"Additional error info: {e.args}")
+ raise
+
+ def _log_operation(self, level: str, message: str, **kwargs) -> None:
+ """Standardize logging across the service."""
+ log_method = getattr(logger, level.lower())
+ log_method(message, **kwargs)
def _build_search_query(self, search: str) -> Dict[str, Any]:
"""Build search query component."""
@@ -106,13 +123,16 @@ async def get_auth_token(self) -> str:
if credentials.expired and credentials.refresh_token:
await run_in_threadpool(lambda: credentials.refresh(Request()))
else:
- raise standardize_error_response(
- Exception("Token expired and cannot be refreshed. User needs to re-authenticate."),
- "get auth token"
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail="Token expired and cannot be refreshed. User needs to re-authenticate."
)
return credentials.token
except Exception as e:
- raise standardize_error_response(e, "get auth token")
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail=f"Token retrieval failed: {str(e)}"
+ )
# -------------------------------------------------------------------------
# Email Parsing Methods
@@ -254,7 +274,7 @@ def _extract_email_body(self, email_message: email.message.Message) -> Tuple[str
body = html_part.get_payload(decode=True).decode(errors="replace")
is_html = True
except Exception as e:
- log_operation(logger, 'error', f"Error decoding HTML part: {e}")
+ self._log_operation('error', f"Error decoding HTML part: {e}")
# Fall back to text part if available
if text_part:
body = text_part.get_payload(decode=True).decode(errors="replace")
@@ -268,14 +288,14 @@ def _extract_email_body(self, email_message: email.message.Message) -> Tuple[str
body = email_message.get_payload(decode=True).decode(errors="replace")
is_html = content_type == "text/html"
except Exception as e:
- log_operation(logger, 'error', f"Error decoding non-multipart message: {e}")
+ self._log_operation('error', f"Error decoding non-multipart message: {e}")
body = email_message.get_payload(decode=False)
# Try to detect HTML if content-type wasn't reliable
is_html = bool(re.search(r'<(?:html|body|div|p|h[1-6])[^>]*>', body, re.IGNORECASE))
# Validate HTML detection with regex if needed
if is_html and not bool(re.search(r'<(?:html|body|div|p|h[1-6])[^>]*>', body, re.IGNORECASE)):
- log_operation(logger, 'warning', "Content marked as HTML but no HTML tags found, validating...")
+ self._log_operation('warning', "Content marked as HTML but no HTML tags found, validating...")
is_html = False # Reset if no HTML tags found
# Apply minimal sanitization for HTML content
@@ -382,7 +402,7 @@ def _fetch_from_imap_sync(self, token: str, email_account: str,
emails.append(email_data)
except Exception as e:
- log_operation(logger, 'error', f"Error processing email {uid}: {e}")
+ self._log_operation('error', f"Error processing email {uid}: {e}")
continue
return emails
@@ -399,7 +419,7 @@ async def save_email_to_db(self, email_data: dict) -> None:
if not existing_email:
email_schema = self._ensure_email_schema(email_data)
await self.email_repository.insert_one(email_schema)
- log_operation(logger, 'info', f"Email {email_id} inserted successfully")
+ self._log_operation('info', f"Email {email_id} inserted successfully")
except Exception as e:
self._handle_email_error(e, "save", email_data.get("email_id"), email_data.get("google_id"))
@@ -448,7 +468,7 @@ async def mark_email_as_read(self, email_id: str, google_id: str) -> Optional[Em
email_id = str(email_id)
email_data = await self.email_repository.find_by_email_and_google_id(email_id, google_id)
if not email_data or email_data["google_id"] != google_id:
- log_operation(logger, 'warning', f"Email {email_id} not found for user {google_id}")
+ self._log_operation('warning', f"Email {email_id} not found for user {google_id}")
return None
email_data["is_read"] = True
@@ -473,24 +493,46 @@ async def delete_email(self, email_id: str, google_id: str) -> bool:
return await self.email_repository.delete_by_id(str(email_id), google_id)
except Exception as e:
self._handle_email_error(e, "delete", email_id, google_id)
-
- async def delete_emails(self, google_id: str) -> bool:
+
+ async def search_emails_by_keyword(self, google_id: str, keyword: str, limit: int = 50) -> List[EmailSchema]:
"""
- Deletes all emails attached to given Google ID.
-
+ Search for emails using summary keywords.
+
Args:
- google_id: Google ID of the user
-
+ google_id: Google ID of the user.
+ keyword: Keyword to search in the summary keywords.
+ limit: Maximum number of emails to return.
+
Returns:
- bool: True if deletion successful
+ List[EmailSchema]: List of emails whose summaries match the keyword and then enriched with corresponding summary.
"""
+ logger.info(f"[Keyword Search] google_id={google_id}, keyword='{keyword}'")
+
try:
- return await self.email_repository.delete_by_google_id(google_id)
+ #Find all email_ids from summaries that match the keyword for the given user
+ email_ids = await self.summary_repository.find_email_ids_by_keyword(google_id, keyword)
+ if not email_ids:
+ return []
+
+ #Query emails from the email repository using those email_ids
+ query = {"google_id": google_id, "email_id": {"$in": [str(eid) for eid in email_ids]}}
+ emails = await self.email_repository.find_many(query, limit=limit)
+
+ # Retrieve matching summary records to extract summary_text
+ summaries = await self.summary_repository.find_many(query)
+ summary_map = {s.email_id: getattr(s, "summary_text", "") for s in summaries}
+
+ # Enrich email records with their corresponding summaries
+ enriched = []
+ for e in emails:
+ base = e if isinstance(e, dict) else e.model_dump()
+ base["summary_text"] = summary_map.get(base["email_id"], "")
+ enriched.append(base)
+
+ return enriched
+
except Exception as e:
- self._handle_email_error(e, "delete", google_id)
-
-
-
+ self._handle_email_error(e, "search by keyword", None, google_id)
# -------------------------------------------------------------------------
# Content Processing Methods
# -------------------------------------------------------------------------
@@ -637,7 +679,7 @@ async def fetch_emails(self, google_id: str, skip: int = 0, limit: int = 20,
)
debug_info["timing"]["main_query"] = (datetime.now() - start_time).total_seconds()
- log_operation(logger, 'info', f"Retrieved {len(emails)} emails out of {total} total for user {google_id}")
+ self._log_operation('info', f"Retrieved {len(emails)} emails out of {total} total for user {google_id}")
return emails, total, debug_info
except Exception as e:
@@ -657,26 +699,26 @@ async def _refresh_emails_from_imap(self, google_id: str, debug_info: Dict[str,
# Get user by google_id
user = await user_service.get_user(google_id)
if not user:
- log_operation(logger, 'error', f"User {google_id} not found in database during IMAP refresh")
+ self._log_operation('error', f"User {google_id} not found in database during IMAP refresh")
debug_info["imap_error"] = f"User {google_id} not found"
return
user_email = user.email
if not user_email:
- log_operation(logger, 'error', f"Email address not found for user {google_id}")
+ self._log_operation('error', f"Email address not found for user {google_id}")
debug_info["imap_error"] = "User email not found"
return
- log_operation(logger, 'info', f"Fetching emails for {user_email}")
+ self._log_operation('info', f"Fetching emails for {user_email}")
# Get token using google_id
token_data = await auth_service.get_token_data(google_id)
if not token_data:
- log_operation(logger, 'error', f"No token found for user {google_id}")
+ self._log_operation('error', f"No token found for user {google_id}")
debug_info["imap_error"] = "No token found for user"
return
- log_operation(logger, 'info', f"Fetching emails from IMAP for {user_email}")
+ self._log_operation('info', f"Fetching emails from IMAP for {user_email}")
imap_emails = await self.fetch_from_imap(
token=token_data.token,
email_account=user_email,
@@ -684,18 +726,18 @@ async def _refresh_emails_from_imap(self, google_id: str, debug_info: Dict[str,
limit=50
)
- log_operation(logger, 'info', f"Retrieved {len(imap_emails)} emails from IMAP for {user_email}")
+ self._log_operation('info', f"Retrieved {len(imap_emails)} emails from IMAP for {user_email}")
for email_data in imap_emails:
email_data["google_id"] = google_id
await self.save_email_to_db(email_data)
debug_info["imap_fetch_count"] = len(imap_emails)
- log_operation(logger, 'info', f"Saved {len(imap_emails)} emails to database for {user_email}")
+ self._log_operation('info', f"Saved {len(imap_emails)} emails to database for {user_email}")
except Exception as e:
+ self._log_operation('exception', f"IMAP fetch failed for user {google_id}: {str(e)}")
debug_info["imap_error"] = str(e)
- raise standardize_error_response(e, "refresh emails from imap", google_id)
finally:
debug_info["timing"]["imap_fetch_duration"] = (datetime.now() - start_time).total_seconds()
diff --git a/backend/app/services/summarization/__init__.py b/backend/app/services/summarization/__init__.py
index 2ac6acbf..3b67676c 100644
--- a/backend/app/services/summarization/__init__.py
+++ b/backend/app/services/summarization/__init__.py
@@ -5,19 +5,14 @@
and strategies.
"""
-# Standard library imports
from typing import TypeVar, Generic
-
-# Third-party imports
from fastapi import Depends, HTTPException
-# Internal imports
-from app.models import EmailSchema
from app.utils.config import Settings, get_settings, SummarizerProvider
+from app.models import EmailSchema
from .base import AdaptiveSummarizer
from .providers.openai.openai import OpenAIEmailSummarizer
from .providers.google.google import GeminiEmailSummarizer
-from .providers.openrouter.openrouter import OpenRouterEmailSummarizer
from .types import ProcessingStrategy
from .summary_service import SummaryService
@@ -28,7 +23,6 @@
'ProcessingStrategy',
'OpenAIEmailSummarizer',
'GeminiEmailSummarizer',
- 'OpenRouterEmailSummarizer',
'get_summarizer'
]
@@ -72,17 +66,6 @@ async def get_summarizer(
model=settings.summarizer_model,
batch_threshold=settings.summarizer_batch_threshold
)
- case SummarizerProvider.OPENROUTER:
- if not settings.openrouter_api_key:
- raise HTTPException(
- status_code=500,
- detail="OpenRouter API key not configured"
- )
- return OpenRouterEmailSummarizer(
- api_key=settings.openrouter_api_key,
- prompt_version=settings.summarizer_prompt_version,
- batch_threshold=settings.summarizer_batch_threshold
- )
case _:
raise HTTPException(
status_code=500,
diff --git a/backend/app/services/summarization/base.py b/backend/app/services/summarization/base.py
index 2cbd96ad..cba3272d 100644
--- a/backend/app/services/summarization/base.py
+++ b/backend/app/services/summarization/base.py
@@ -1,11 +1,9 @@
-# Standard library imports
from abc import ABC, abstractmethod
from typing import Generic, List, Optional, TypeVar
from datetime import datetime, timezone
import asyncio
+import logging
-# Internal imports
-from app.utils.helpers import get_logger
from app.models import SummarySchema, EmailSchema
from app.services.summarization.types import(
ModelBackend,
@@ -45,7 +43,7 @@ def __init__(
self.timeout = timeout
self.model_config = model_config or {}
self._metrics: List[SummaryMetrics] = []
- self._logger = get_logger(self.__class__.__name__, 'service')
+ self._logger = logging.getLogger(self.__class__.__name__)
@abstractmethod
async def prepare_content(self, email: T) -> str:
diff --git a/backend/app/services/summarization/prompts.py b/backend/app/services/summarization/prompts.py
index 93ac8efd..dc6218af 100644
--- a/backend/app/services/summarization/prompts.py
+++ b/backend/app/services/summarization/prompts.py
@@ -1,17 +1,9 @@
-"""
-Prompt management for email summarization.
-
-This module provides abstract base classes and concrete implementations for managing
-prompts across different LLM providers.
-"""
-
-# Standard library imports
+# summarization/providers/prompts.py
from abc import ABC, abstractmethod
from typing import Optional, Protocol, Dict, Any, runtime_checkable
from dataclasses import dataclass, field
from enum import Enum
-# Internal imports
from app.utils.config import PromptVersion
@dataclass
@@ -103,9 +95,15 @@ def get_response_format(self, version: Optional[PromptVersion] = None) -> Dict[s
# Core prompt templates
EMAIL_SUMMARY_SYSTEM_PROMPT = PromptTemplate(
version=PromptVersion.V2,
- template="""You are a precise email summarizer. Your task is to:
+ template="""You are a precise email summarizer that produces JSON output. Your task is to:
1. Create a concise, factual single-sentence summary capturing the key message or request
-2. Extract 3-5 key topics or themes as keywords""",
+2. Extract 3-5 key topics or themes as keywords
+
+Return your analysis in JSON format with the following structure:
+{
+ "summary": "The concise summary sentence",
+ "keywords": ["keyword1", "keyword2", "keyword3"]
+}""",
metadata={
"description": "System prompt for email summarization with JSON output",
"response_format": {"type": "json_object"},
@@ -118,10 +116,12 @@ def get_response_format(self, version: Optional[PromptVersion] = None) -> Dict[s
EMAIL_SUMMARY_USER_PROMPT = PromptTemplate(
version=PromptVersion.V2,
- template="""Please analyze this email and provide a summary and keywords.
+ template="""Please analyze this email and provide the summary and keywords in JSON format.
Email Content:
-{content}""",
+{content}
+
+Remember to format your response as JSON with 'summary' and 'keywords' fields.""",
metadata={
"description": "User prompt for email summarization with JSON format specification",
"variables": ["content"]
diff --git a/backend/app/services/summarization/providers/google/google.py b/backend/app/services/summarization/providers/google/google.py
index 9ec33c58..39c8beb6 100644
--- a/backend/app/services/summarization/providers/google/google.py
+++ b/backend/app/services/summarization/providers/google/google.py
@@ -1,9 +1,6 @@
-# Standard library imports
from typing import Dict, TypeVar, List, Optional
from datetime import datetime, timezone
import json
-
-# Third-party imports
from google import genai
from google.genai import types
from tenacity import (
@@ -12,13 +9,13 @@
wait_exponential,
retry_if_exception_type
)
-
-# Internal imports
-from app.models import SummarySchema
-from app.services.summarization.prompts import PromptManager, PromptVersion
+# internal
+from app.services.summarization.prompts import PromptManager
+from app.utils.config import ProviderModel, SummarizerProvider
from app.services.summarization.providers.openai.openai import OpenAIBackend, OpenAIEmailSummarizer
+from app.models import SummarySchema
from app.services.summarization.types import ModelBackend, ModelConfig
-from app.utils.config import ProviderModel, SummarizerProvider
+from app.services.summarization.prompts import PromptVersion
from .prompts import GeminiPromptManager
diff --git a/backend/app/services/summarization/providers/google/prompts.py b/backend/app/services/summarization/providers/google/prompts.py
index adfa6af4..7b7cf8f8 100644
--- a/backend/app/services/summarization/providers/google/prompts.py
+++ b/backend/app/services/summarization/providers/google/prompts.py
@@ -1,15 +1,12 @@
-# Standard library imports
-from dataclasses import dataclass
+from dataclasses import dataclass, field
from typing import Any, Dict, Optional
-
-# Internal imports
from app.services.summarization.prompts import(
PromptManager,
- PromptTemplate,
EMAIL_SUMMARY_SYSTEM_PROMPT,
EMAIL_SUMMARY_USER_PROMPT
)
from app.utils.config import PromptVersion
+from app.services.summarization.prompts import PromptTemplate
@dataclass
class GeminiPromptManager(PromptManager):
diff --git a/backend/app/services/summarization/providers/openai/openai.py b/backend/app/services/summarization/providers/openai/openai.py
index 3ea86553..9d576d15 100644
--- a/backend/app/services/summarization/providers/openai/openai.py
+++ b/backend/app/services/summarization/providers/openai/openai.py
@@ -1,10 +1,7 @@
-# Standard library imports
from typing import List, Optional, Dict, TypeVar
from datetime import datetime, timezone
import asyncio
import json
-
-# Third-party imports
from openai import (
RateLimitError,
APITimeoutError,
@@ -17,14 +14,14 @@
wait_exponential,
retry_if_exception_type
)
-
-# Internal imports
-from app.models import EmailSchema, SummarySchema
+# internal
from app.services.summarization.base import AdaptiveSummarizer
-from app.services.summarization.prompts import PromptManager
from app.services.summarization.types import ModelBackend, ModelConfig
-from app.utils.config import ProviderModel, SummarizerProvider, PromptVersion
+from app.models import EmailSchema, SummarySchema
+from app.utils.config import ProviderModel, SummarizerProvider
+from app.services.summarization.prompts import PromptManager
from .prompts import OpenAIPromptManager
+from app.utils.config import PromptVersion
T = TypeVar('T')
diff --git a/backend/app/services/summarization/providers/openai/prompts.py b/backend/app/services/summarization/providers/openai/prompts.py
index 85872ad9..85ab0429 100644
--- a/backend/app/services/summarization/providers/openai/prompts.py
+++ b/backend/app/services/summarization/providers/openai/prompts.py
@@ -1,8 +1,5 @@
-# Standard library imports
-from dataclasses import dataclass
+from dataclasses import dataclass, field
from typing import Any, Dict, Optional
-
-# Internal imports
from app.services.summarization.prompts import(
PromptManager,
EMAIL_SUMMARY_SYSTEM_PROMPT,
diff --git a/backend/app/services/summarization/providers/openrouter/openrouter.py b/backend/app/services/summarization/providers/openrouter/openrouter.py
deleted file mode 100644
index 13feabff..00000000
--- a/backend/app/services/summarization/providers/openrouter/openrouter.py
+++ /dev/null
@@ -1,184 +0,0 @@
-# Core
-from typing import List, Optional, Dict, TypeVar
-from datetime import datetime, timezone
-import asyncio
-import json
-import sys
-
-from openai import AsyncOpenAI, RateLimitError, APITimeoutError, APIError
-from tenacity import (
- retry,
- stop_after_attempt,
- wait_exponential,
- retry_if_exception_type
-)
-# internal
-from app.services.summarization.base import AdaptiveSummarizer
-from app.services.summarization.types import ModelBackend, ModelConfig
-from app.models import EmailSchema, SummarySchema
-from app.utils.config import ProviderModel, SummarizerProvider
-from app.services.summarization.prompts import PromptManager
-from .prompts import OpenRouterPromptManager
-from app.utils.config import PromptVersion
-from app.utils.helpers import get_logger
-
-class OpenRouterBackend(ModelBackend):
- """OpenRouter implementation that delegates model routing to the provider."""
-
- def __init__(
- self,
- api_key: str,
- prompt_manager: PromptManager,
- temperature: float = 0.3,
- max_tokens: int = 150,
- ):
- self.logger = get_logger(self.__class__.__name__, 'service')
- self.client = AsyncOpenAI(
- base_url="https://openrouter.ai/api/v1",
- api_key=api_key
- )
- self.prompt_manager = prompt_manager
- self.temperature = temperature
- self.max_tokens = max_tokens
-
- @retry(
- retry=retry_if_exception_type((
- RateLimitError,
- APITimeoutError,
- APIError,
- )),
- wait=wait_exponential(multiplier=1, min=4, max=10),
- stop=stop_after_attempt(3)
- )
- async def generate_summary(
- self,
- content: str,
- config: Optional[ModelConfig] = None
- ) -> tuple[str, List[str]]:
- """Generate a summary, letting OpenRouter handle model selection."""
- cfg = config or {}
-
- messages = [
- {
- "role": "system",
- "content": self.prompt_manager.get_system_prompt()
- },
- {
- "role": "user",
- "content": self.prompt_manager.get_user_prompt(content)
- }
- ]
-
- # Log the request payload for diagnostics
- request_payload = {
- "messages": messages,
- "temperature": cfg.get("temperature", self.temperature),
- "max_tokens": cfg.get("max_tokens", self.max_tokens),
- "models": ProviderModel.get_openrouter_fallbacks(),
- "route": "fallback"
- }
- self.logger.info(f"Sending request to OpenRouter: {json.dumps(request_payload, indent=2)}")
-
- try:
- # Let OpenRouter select the model from the provided list
- response = await self.client.chat.completions.create(
- model=ProviderModel.get_openrouter_fallbacks()[0], # Required by SDK, OR will use list below
- messages=messages,
- temperature=cfg.get("temperature", self.temperature),
- max_tokens=cfg.get("max_tokens", self.max_tokens),
- response_format=self.prompt_manager.get_response_format(),
- extra_body={
- "models": ProviderModel.get_openrouter_fallbacks(),
- "route": "fallback" # Ensures it tries models in order
- }
- )
- except APIError as e:
- self.logger.error(f"OpenRouter API request failed with status code {e.status_code}.")
- if e.body:
- self.logger.error(f"Error response body: {e.body}")
- raise # Re-raise the exception to be handled by the retry decorator or calling service
-
- # Parse the response
- try:
- result = json.loads(response.choices[0].message.content)
- return result.get("summary", ""), result.get("keywords", [])
- except (json.JSONDecodeError, KeyError, IndexError) as e:
- # Fallback handling if JSON parsing fails
- self.logger.warning(f"Failed to parse JSON. Response: {response.choices[0].message.content if response.choices else 'empty'}. Error: {e}")
- summary = response.choices[0].message.content
- return summary.strip(), []
-
- async def batch_generate_summaries(
- self,
- contents: List[str],
- config: Optional[ModelConfig] = None
- ) -> List[tuple[str, List[str]]]:
- """Generate summaries for multiple emails."""
- # Implement concurrent processing with rate limiting
- semaphore = asyncio.Semaphore(5) # Limit concurrent API calls
-
- async def _process_with_semaphore(content: str) -> tuple[str, List[str]]:
- async with semaphore:
- return await self.generate_summary(content, config)
-
- return await asyncio.gather(
- *[_process_with_semaphore(content) for content in contents]
- )
-
- @property
- def model_info(self) -> Dict[str, str]:
- return {
- "provider": "OpenRouter",
- "model": "auto" # Model is selected by OpenRouter
- }
-
-class OpenRouterEmailSummarizer(AdaptiveSummarizer[EmailSchema]):
- """Email summarizer implementation using OpenRouter's API."""
-
- def __init__(
- self,
- api_key: str,
- batch_threshold: int = 10,
- max_batch_size: int = 50,
- timeout: float = 30.0,
- prompt_version: PromptVersion = PromptVersion.latest(),
- ):
- prompt_manager = OpenRouterPromptManager(prompt_version=prompt_version)
- backend = OpenRouterBackend(
- api_key=api_key,
- prompt_manager=prompt_manager,
- )
- super().__init__(
- model_backend=backend,
- prompt_manager=prompt_manager,
- batch_threshold=batch_threshold,
- max_batch_size=max_batch_size,
- timeout=timeout
- )
-
- async def prepare_content(self, email: EmailSchema) -> str:
- """Transform EmailSchema into processable content."""
- return (
- f"From: {email.sender}\n"
- f"To: {', '.join(email.recipients)}\n"
- f"Subject: {email.subject}\n"
- f"Date: {email.received_at}\n\n"
- f"Body:\n{email.body}"
- )
-
- def create_summary(
- self,
- email_id: str,
- summary_text: str,
- keywords: List[str],
- google_id: str
- ) -> SummarySchema:
- """Create a SummarySchema from processing results."""
- return SummarySchema(
- email_id=email_id,
- summary_text=summary_text,
- keywords=keywords,
- generated_at=datetime.now(timezone.utc),
- model_info=self._backend.model_info,
- google_id=google_id
- )
\ No newline at end of file
diff --git a/backend/app/services/summarization/providers/openrouter/prompts.py b/backend/app/services/summarization/providers/openrouter/prompts.py
deleted file mode 100644
index 8f28ec4f..00000000
--- a/backend/app/services/summarization/providers/openrouter/prompts.py
+++ /dev/null
@@ -1,69 +0,0 @@
-from dataclasses import dataclass, field
-from typing import Any, Dict, List, Optional
-from app.services.summarization.prompts import(
- BasePromptManager,
- PromptTemplate,
- EMAIL_SUMMARY_USER_PROMPT,
- PromptManager,
- EMAIL_SUMMARY_SYSTEM_PROMPT
-)
-from app.utils.config import PromptVersion
-
-# New OpenRouter-specific system prompt
-OPENROUTER_SYSTEM_PROMPT_TEMPLATE = """You are a precise email summarizer. Your task is to analyze the user's email and provide a response in valid JSON format.
-
-You must respond with a JSON object containing exactly these fields:
-- "summary": A concise, factual single-sentence summary capturing the key message or request
-- "keywords": An array of 3-5 key topics or themes extracted from the email
-
-Example response format:
-{
- "summary": "The sender is requesting a meeting to discuss the quarterly budget review.",
- "keywords": ["meeting", "quarterly", "budget", "review", "discussion"]
-}
-
-IMPORTANT: Respond only with a valid JSON object. Do not add any explanatory text or wrap the JSON in markdown code blocks."""
-
-EMAIL_SUMMARY_OPENROUTER_SYSTEM_PROMPT = PromptTemplate(
- version=PromptVersion.V1,
- template=OPENROUTER_SYSTEM_PROMPT_TEMPLATE,
- metadata={}
-)
-
-@dataclass
-class OpenRouterPromptTemplate(PromptTemplate):
- """
- OpenRouter-specific prompt template with basic JSON response format.
- """
-
- def get_response_format(self) -> Dict[str, Any]:
- """
- Return the basic JSON response format supported by most OpenRouter models.
- """
- return {
- "type": "json_object"
- }
-
-# Simplified prompt template
-EMAIL_SUMMARY_OPENROUTER_PROMPT = OpenRouterPromptTemplate(
- version=PromptVersion.V1,
- template=EMAIL_SUMMARY_OPENROUTER_SYSTEM_PROMPT.template,
- metadata={}
-)
-
-@dataclass
-class OpenRouterPromptManager(PromptManager):
- """OpenRouter-specific prompt management, mirroring OpenAI's structure."""
- prompt_version: PromptVersion = PromptVersion.latest()
-
- def get_system_prompt(self, version: Optional[PromptVersion] = None) -> str:
- return OPENROUTER_SYSTEM_PROMPT_TEMPLATE
-
- def get_user_prompt(self, content: str, version: Optional[PromptVersion] = None) -> str:
- return EMAIL_SUMMARY_USER_PROMPT.template.format(content=content)
-
- def get_response_format(self, version: Optional[PromptVersion] = None) -> Dict[str, Any]:
- """
- Return the basic JSON response format supported by most OpenRouter models.
- """
- return {"type": "json_object"}
\ No newline at end of file
diff --git a/backend/app/services/summarization/summary_service.py b/backend/app/services/summarization/summary_service.py
index d28a610e..10654c5a 100644
--- a/backend/app/services/summarization/summary_service.py
+++ b/backend/app/services/summarization/summary_service.py
@@ -2,16 +2,13 @@
Service for handling email summarization operations.
"""
-# Standard library imports
+import logging
from typing import List, Optional, Dict, Any
from datetime import datetime, timezone, timedelta
-
-# Third-party imports
-from fastapi import HTTPException, status, Depends
+from fastapi import HTTPException, status
+from fastapi import Depends
from bson import ObjectId
-# Internal imports
-from app.utils.helpers import get_logger, log_operation, standardize_error_response
from app.models import EmailSchema, SummarySchema
from app.services.database.repositories.summary_repository import SummaryRepository
from app.services.database.factories import get_summary_repository, get_email_service
@@ -22,11 +19,15 @@
GeminiEmailSummarizer
)
-# -------------------------------------------------------------------------
-# Configuration
-# -------------------------------------------------------------------------
+# Configure logging with format and level
+logging.basicConfig(
+ level=logging.DEBUG,
+ format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+ datefmt='%Y-%m-%d %H:%M:%S'
+)
-logger = get_logger(__name__, 'service')
+# Create module-specific logger
+logger = logging.getLogger(__name__)
class SummaryService:
"""
@@ -60,9 +61,10 @@ async def initialize(self):
await self.summary_repository.create_index("generated_at") # For time-based queries
await self.summary_repository.create_index("google_id") # For user-specific summaries
- log_operation(logger, 'info', "Summary collection indexes initialized")
+ logger.info("Summary collection indexes initialized")
except Exception as e:
- raise standardize_error_response(e, "initialize summary indexes")
+ logger.error(f"Failed to initialize summary indexes: {e}")
+ raise
async def save_summary(self, summary: SummarySchema, google_id: str) -> str:
"""
@@ -99,11 +101,12 @@ async def save_summary(self, summary: SummarySchema, google_id: str) -> str:
if not result:
raise Exception("Failed to save summary")
- log_operation(logger, 'debug', f"Summary saved for email {summary.email_id} for user {google_id}")
+ logger.debug(f"Summary saved for email {summary.email_id} for user {google_id}")
return summary.email_id
except Exception as e:
- raise standardize_error_response(e, "save summary", summary.email_id)
+ logger.error(f"Failed to save summary: {e}")
+ raise HTTPException(status_code=500, detail=str(e))
async def get_summary(self, email_id: str, google_id: str) -> Optional[SummarySchema]:
"""
@@ -129,7 +132,8 @@ async def get_summary(self, email_id: str, google_id: str) -> Optional[SummarySc
# Otherwise create a new SummarySchema instance
return SummarySchema(**result)
except Exception as e:
- raise standardize_error_response(e, "get summary", email_id)
+ logger.error(f"Failed to retrieve summary for email {email_id}: {e}")
+ raise HTTPException(status_code=500, detail=str(e))
async def get_summaries(
self,
@@ -173,7 +177,8 @@ async def get_summaries(
# Convert results to SummarySchema objects if needed
return [result if isinstance(result, SummarySchema) else SummarySchema(**result) for result in results]
except Exception as e:
- raise standardize_error_response(e, "get summaries")
+ logger.error(f"Failed to retrieve summaries: {e}")
+ raise HTTPException(status_code=500, detail=str(e))
async def search_by_keywords(
self,
@@ -203,7 +208,8 @@ async def search_by_keywords(
results = await self.summary_repository.find_many(query, limit=limit)
return [result if isinstance(result, SummarySchema) else SummarySchema(**result) for result in results]
except Exception as e:
- raise standardize_error_response(e, "search summaries by keywords")
+ logger.error(f"Failed to search summaries by keywords: {e}")
+ raise HTTPException(status_code=500, detail=str(e))
async def get_recent_summaries(
self,
@@ -240,7 +246,7 @@ async def get_recent_summaries(
if google_id:
query["google_id"] = google_id
- log_operation(logger, 'debug', f"Querying summaries between {cutoff_date.isoformat()} and {now.isoformat()}")
+ logger.debug(f"Querying summaries between {cutoff_date.isoformat()} and {now.isoformat()}")
results = await self.summary_repository.find_many(
query,
@@ -248,10 +254,11 @@ async def get_recent_summaries(
sort=[("generated_at", -1)]
)
- log_operation(logger, 'debug', f"Found {len(results)} summaries matching query")
+ logger.debug(f"Found {len(results)} summaries matching query")
return [result if isinstance(result, SummarySchema) else SummarySchema(**result) for result in results]
except Exception as e:
- raise standardize_error_response(e, "get recent summaries")
+ logger.error(f"Failed to retrieve recent summaries: {e}", exc_info=True)
+ raise HTTPException(status_code=500, detail=str(e))
async def delete_summary(self, email_id: str, google_id: str) -> bool:
"""
@@ -272,39 +279,14 @@ async def delete_summary(self, email_id: str, google_id: str) -> bool:
deleted = result.deleted_count > 0
if deleted:
- log_operation(logger, 'info', f"Summary for email {email_id} deleted for user {google_id}")
- else:
- log_operation(logger, 'info', f"No summary found for email {email_id} for user {google_id} to delete")
-
- return deleted
- except Exception as e:
- raise standardize_error_response(e, "delete summary", email_id)
-
- async def delete_summaries_by_google_id(self, google_id: str) -> bool:
- """
- Delete all summaries for a specific Google user ID.
-
- Args:
- google_id: Google ID of the user whose summaries to delete
-
- Returns:
- bool: True if any summaries were deleted, False otherwise
-
- Raises:
- Exception: If database operation fails
- """
- try:
- result = await self.summary_repository.delete_by_google_id({"google_id": google_id})
- deleted = result.deleted_count > 0
-
- if deleted:
- log_operation(logger, 'info', f"All summaries deleted for user {google_id}")
+ logger.info(f"Summary for email {email_id} deleted for user {google_id}")
else:
- log_operation(logger, 'info', f"No summaries found for user {google_id} to delete")
+ logger.info(f"No summary found for email {email_id} for user {google_id} to delete")
return deleted
except Exception as e:
- raise standardize_error_response(e, "delete summaries by google id", google_id)
+ logger.error(f"Failed to delete summary for email {email_id}: {e}")
+ raise HTTPException(status_code=500, detail=str(e))
async def save_summaries_batch(self, summaries: List[SummarySchema], google_id: str) -> Dict[str, int]:
"""
@@ -344,13 +326,14 @@ async def save_summaries_batch(self, summaries: List[SummarySchema], google_id:
"inserted": result.upserted_count,
"modified": result.modified_count
}
- log_operation(logger, 'info', f"Batch summary save: {result.upserted_count} inserted, "
+ logger.info(f"Batch summary save: {result.upserted_count} inserted, "
f"{result.modified_count} modified for user {google_id}")
return stats
return {"inserted": 0, "modified": 0}
except Exception as e:
- raise standardize_error_response(e, "save summaries batch")
+ logger.error(f"Error in batch saving summaries: {e}")
+ raise HTTPException(status_code=500, detail=str(e))
async def count_summaries(self, query: Dict = None, google_id: str = None) -> int:
"""
@@ -372,7 +355,8 @@ async def count_summaries(self, query: Dict = None, google_id: str = None) -> in
query["google_id"] = google_id
return await self.summary_repository.count_documents(query)
except Exception as e:
- raise standardize_error_response(e, "count summaries")
+ logger.error(f"Failed to count summaries: {e}")
+ raise HTTPException(status_code=500, detail=str(e))
async def get_summaries_by_ids(self, email_ids: List[str], google_id: str) -> List[SummarySchema]:
"""
@@ -405,11 +389,12 @@ async def get_summaries_by_ids(self, email_ids: List[str], google_id: str) -> Li
summaries.append(SummarySchema(**doc))
# Log how many were found
- log_operation(logger, 'debug', f"Found {len(summaries)} summaries out of {len(email_ids)} requested for user {google_id}")
+ logger.debug(f"Found {len(summaries)} summaries out of {len(email_ids)} requested for user {google_id}")
return summaries
except Exception as e:
- raise standardize_error_response(e, "get summaries by ids")
+ logger.error(f"Failed to retrieve summaries by IDs: {e}")
+ raise HTTPException(status_code=500, detail=str(e))
async def get_or_create_summary(
self,
@@ -437,7 +422,7 @@ async def get_or_create_summary(
# Get email data
email = await self.email_service.get_email(email_id, google_id)
if not email:
- log_operation(logger, 'warning', f"Email {email_id} not found for user {google_id}")
+ logger.warning(f"Email {email_id} not found for user {google_id}")
return None
# Generate summary using EmailSchema directly
@@ -447,7 +432,7 @@ async def get_or_create_summary(
)
if not summaries:
- log_operation(logger, 'warning', f"Failed to generate summary for email {email_id}")
+ logger.warning(f"Failed to generate summary for email {email_id}")
return None
# Create a new SummarySchema with the google_id
@@ -457,12 +442,13 @@ async def get_or_create_summary(
# Store summary
await self.save_summary(summary, google_id)
- log_operation(logger, 'info', f"Created new summary for email {email_id}")
+ logger.info(f"Created new summary for email {email_id}")
return summary.model_dump()
except Exception as e:
- raise standardize_error_response(e, "get or create summary", email_id)
+ logger.error(f"Failed to get or create summary for email {email_id}: {e}")
+ raise HTTPException(status_code=500, detail=str(e))
async def get_or_create_summaries_batch(
self,
@@ -523,15 +509,14 @@ async def get_or_create_summaries_batch(
missing_emails.append(email)
else:
failed_emails.append(email_id)
- log_operation(logger, 'warning', f"Email {email_id} not found for user {google_id}")
+ logger.warning(f"Email {email_id} not found for user {google_id}")
except Exception as e:
failed_emails.append(email_id)
- log_operation(logger, 'warning', f"Error fetching email {email_id}: {e}")
+ logger.warning(f"Error fetching email {email_id}: {e}")
if missing_emails:
# Generate summaries for missing emails
try:
- log_operation(logger, 'info', f"Attempting to generate summaries for {len(missing_emails)} emails.")
new_summaries = await summarizer.summarize(
missing_emails,
strategy=ProcessingStrategy.ADAPTIVE
@@ -542,7 +527,7 @@ async def get_or_create_summaries_batch(
await self.save_summaries_batch(new_summaries, google_id)
all_summaries.extend(new_summaries)
except Exception as e:
- log_operation(logger, 'error', f"Failed to generate summaries for batch: {e}")
+ logger.error(f"Failed to generate summaries for batch: {e}")
all_failed_summaries.extend([email.email_id for email in missing_emails])
continue
@@ -552,13 +537,13 @@ async def get_or_create_summaries_batch(
all_summaries.extend(existing_summaries)
except Exception as e:
- log_operation(logger, 'error', f"Error processing batch {i//batch_size + 1}: {e}")
+ logger.error(f"Error processing batch {i//batch_size + 1}: {e}")
# Mark all emails in this batch as failed
all_failed_summaries.extend(batch_ids)
continue
# Log final results
- log_operation(logger, 'info',
+ logger.info(
f"Batch summary results for user {google_id}: "
f"{len(all_summaries)} successful, {len(all_missing_emails)} missing emails, "
f"{len(all_failed_summaries)} failed summaries"
@@ -571,4 +556,8 @@ async def get_or_create_summaries_batch(
}
except Exception as e:
- raise standardize_error_response(e, "process batch summaries")
\ No newline at end of file
+ logger.error(f"Failed to process batch summaries: {e}")
+ raise HTTPException(
+ status_code=500,
+ detail=f"Failed to process summaries: {str(e)}"
+ )
\ No newline at end of file
diff --git a/backend/app/services/summarization/types.py b/backend/app/services/summarization/types.py
index ddd2e0db..13606182 100644
--- a/backend/app/services/summarization/types.py
+++ b/backend/app/services/summarization/types.py
@@ -1,12 +1,9 @@
-# Standard library imports
from typing import Protocol, List, Optional, Dict
from enum import Enum, auto
+from pydantic import Field
from dataclasses import dataclass
from datetime import datetime, timezone
-# Third-party imports
-from pydantic import Field
-
ModelConfig = Dict[str, any]
class ProcessingStrategy(Enum):
diff --git a/backend/app/services/user_service.py b/backend/app/services/user_service.py
index 64f4b1c4..dc47c51f 100644
--- a/backend/app/services/user_service.py
+++ b/backend/app/services/user_service.py
@@ -2,22 +2,24 @@
User service for handling user-related operations.
"""
-# Standard library imports
+import logging
from typing import Optional, Dict, Any
-
-# Third-party imports
from fastapi import HTTPException, status
+from bson import ObjectId
+from google.oauth2.credentials import Credentials
-# Internal imports
-from app.utils.helpers import get_logger, log_operation, standardize_error_response
-from app.models import UserSchema, PreferencesSchema
-from app.services.database import get_user_repository, UserRepository
+# Import from app modules
+from app.models import UserSchema, TokenData, PreferencesSchema
+from app.services.database import UserRepository, get_user_repository
-# -------------------------------------------------------------------------
-# Configuration
-# -------------------------------------------------------------------------
+# Configure logging
+logging.basicConfig(
+ level=logging.DEBUG,
+ format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+ datefmt='%Y-%m-%d %H:%M:%S'
+)
-logger = get_logger(__name__, 'service')
+logger = logging.getLogger(__name__)
class UserService:
"""
@@ -29,14 +31,16 @@ class UserService:
- Managing user authentication state
"""
- def __init__(self, user_repository: UserRepository = None):
+ def __init__(self, user_repository: UserRepository):
"""
Initialize the user service.
Args:
user_repository: User repository instance
"""
- self.user_repository = user_repository or get_user_repository()
+ self.user_repository = user_repository
+ # Note: We can't call ensure_indexes here because it's async
+ # The indexes will be created on first use
async def get_user(self, user_id: str) -> Optional[UserSchema]:
"""
@@ -51,7 +55,11 @@ async def get_user(self, user_id: str) -> Optional[UserSchema]:
try:
return await self.user_repository.find_by_id(user_id)
except Exception as e:
- raise standardize_error_response(e, "get user", user_id)
+ logger.error(f"Failed to get user: {e}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail="Failed to get user"
+ )
async def get_user_by_email(self, email: str) -> Optional[UserSchema]:
"""
@@ -66,7 +74,11 @@ async def get_user_by_email(self, email: str) -> Optional[UserSchema]:
try:
return await self.user_repository.find_by_email(email)
except Exception as e:
- raise standardize_error_response(e, "get user by email", email)
+ logger.error(f"Failed to get user by email: {e}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail="Failed to get user by email"
+ )
async def create_user(self, user_data: Dict[str, Any]) -> UserSchema:
"""
@@ -78,6 +90,7 @@ async def create_user(self, user_data: Dict[str, Any]) -> UserSchema:
Returns:
Created user
"""
+
try:
# Create a complete user data dictionary with all required fields
complete_user_data = {
@@ -105,17 +118,20 @@ async def create_user(self, user_data: Dict[str, Any]) -> UserSchema:
user_id = await self.user_repository.insert_one(user)
user._id = user_id
- log_operation(logger, 'info', f"Created user: {user.email}")
return user
except Exception as e:
- raise standardize_error_response(e, "create user", user_data.get("email"))
+ logger.error(f"Failed to create user: {e}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail="Failed to create user"
+ )
- async def update_user(self, google_id: str, user_data: Dict[str, Any]) -> Optional[UserSchema]:
+ async def update_user(self, user_id: str, user_data: Dict[str, Any]) -> Optional[UserSchema]:
"""
Update a user.
Args:
- google_id: User Google ID
+ user_id: User ID
user_data: Updated user data
Returns:
@@ -123,45 +139,49 @@ async def update_user(self, google_id: str, user_data: Dict[str, Any]) -> Option
"""
try:
# First get the current user to ensure it exists
- current_user = await self.user_repository.find_by_id(google_id)
+ current_user = await self.user_repository.find_by_id(user_id)
if not current_user:
- log_operation(logger, 'warning', f"User not found: {google_id}")
+ logger.error(f"User not found: {user_id}")
return None
# Update the user
- success = await self.user_repository.update_one(google_id, user_data)
+ success = await self.user_repository.update_one(user_id, user_data)
if not success:
- log_operation(logger, 'warning', f"Update failed for user: {google_id}")
+ logger.error(f"Update failed for user: {user_id}")
return None
# Get the updated user
- updated_user = await self.user_repository.find_by_id(google_id)
+ updated_user = await self.user_repository.find_by_id(user_id)
if not updated_user:
- log_operation(logger, 'warning', f"Failed to fetch updated user: {google_id}")
+ logger.error(f"Failed to fetch updated user: {user_id}")
return None
- log_operation(logger, 'info', f"Updated user: {google_id}")
return UserSchema(**updated_user)
except Exception as e:
- raise standardize_error_response(e, "update user", google_id)
+ logger.error(f"Failed to update user: {e}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail="Failed to update user"
+ )
- async def delete_user(self, google_id: str) -> bool:
+ async def delete_user(self, user_id: str) -> bool:
"""
Delete a user.
Args:
- google_id: User Google ID
+ user_id: User ID
Returns:
True if deleted, False otherwise
"""
try:
- result = await self.user_repository.delete_by_google_id(google_id)
- if result:
- log_operation(logger, 'info', f"Deleted user: {google_id}")
- return result
+ return await self.user_repository.delete_one(user_id)
except Exception as e:
- raise standardize_error_response(e, "delete user", google_id)
+ logger.error(f"Failed to delete user: {e}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail="Failed to delete user"
+ )
async def get_preferences(self, google_id: str) -> Dict[str, Any]:
"""
@@ -174,11 +194,15 @@ async def get_preferences(self, google_id: str) -> Dict[str, Any]:
Dict[str, Any]: User preferences
"""
try:
- log_operation(logger, 'debug', f"Fetching preferences for Google ID: {google_id}")
+ logger.debug(f"Fetching preferences for Google ID: {google_id}")
user = await self.user_repository.find_one({"google_id": google_id})
return user.get("preferences", {}) if user else {}
except Exception as e:
- raise standardize_error_response(e, "get preferences", google_id)
+ logger.error(f"Failed to get preferences: {e}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail="Failed to get preferences"
+ )
async def update_preferences(self, google_id: str, preferences: Dict[str, Any]) -> bool:
"""
@@ -192,13 +216,15 @@ async def update_preferences(self, google_id: str, preferences: Dict[str, Any])
bool: True if update successful
"""
try:
- log_operation(logger, 'debug', f"Updating preferences for Google ID: {google_id}")
+ logger.debug(f"Updating preferences for Google ID: {google_id}")
result = await self.user_repository.update_one(
- {"google_id": google_id},
- {"$set": {"preferences": preferences}}
- )
- if result:
- log_operation(logger, 'info', f"Updated preferences for user: {google_id}")
+ {"google_id": google_id},
+ {"$set": {"preferences": preferences}}
+ )
return result
except Exception as e:
- raise standardize_error_response(e, "update preferences", google_id)
+ logger.error(f"Failed to update preferences: {e}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail="Failed to update preferences"
+ )
diff --git a/backend/app/tests/conftest.py b/backend/app/tests/conftest.py
index fa27756b..cf6cc4b1 100644
--- a/backend/app/tests/conftest.py
+++ b/backend/app/tests/conftest.py
@@ -89,6 +89,5 @@ def test_settings():
"OPENAI_API_KEY": os.getenv("OPENAI_API_KEY", "test_key_123"),
"GOOGLE_CLIENT_ID": os.getenv("GOOGLE_CLIENT_ID", "test_client_id"),
"GOOGLE_CLIENT_SECRET": os.getenv("GOOGLE_CLIENT_SECRET", "test_client_secret"),
- "MONGO_URI": os.getenv("MONGO_URI", "mongodb://localhost:27017/test_db"),
- "OPENROUTER_API_KEY": os.getenv("OPENROUTER_API_KEY", "sk-or-test-key-123456789")
+ "MONGO_URI": os.getenv("MONGO_URI", "mongodb://localhost:27017/test_db")
}
\ No newline at end of file
diff --git a/backend/app/tests/test_config.py b/backend/app/tests/test_config.py
index 9e08dba8..f8d93817 100644
--- a/backend/app/tests/test_config.py
+++ b/backend/app/tests/test_config.py
@@ -16,7 +16,6 @@ class MockSettings(Settings):
google_client_secret: str = os.getenv("GOOGLE_CLIENT_SECRET", "test-client-secret")
email_account: str = os.getenv("EMAIL_ACCOUNT", "test@example.com")
mongo_uri: str = os.getenv("MONGO_URI", "mongodb://localhost:27017/test_db")
- openrouter_api_key: str = os.getenv("OPENROUTER_API_KEY", "sk-or-test-key-123456789")
openai_api_key: str = os.getenv("OPENAI_API_KEY", "sk-test-key-123456789")
deepseek_api_key: str | None = os.getenv("DEEPSEEK_API_KEY")
gemini_api_key: str | None = os.getenv("GEMINI_API_KEY")
diff --git a/backend/app/tests/unit/summary/test_openrouter_provider.py b/backend/app/tests/unit/summary/test_openrouter_provider.py
deleted file mode 100644
index df23185b..00000000
--- a/backend/app/tests/unit/summary/test_openrouter_provider.py
+++ /dev/null
@@ -1,92 +0,0 @@
-import pytest
-import asyncio
-from unittest.mock import AsyncMock, MagicMock, patch
-
-from app.models import EmailSchema
-from app.services.summarization.providers.openrouter.openrouter import OpenRouterEmailSummarizer, OpenRouterBackend
-from app.services.summarization.providers.openrouter.prompts import OpenRouterPromptManager
-from app.utils.config import PromptVersion
-
-@pytest.fixture
-def email_schema_fixture():
- """Fixture for a sample EmailSchema."""
- return EmailSchema(
- email_id="test_email_id",
- google_id="test_google_id",
- sender="sender@example.com",
- recipients=["recipient@example.com"],
- subject="Test Subject",
- body="This is a test email body.",
- received_at="2023-01-01T00:00:00Z"
- )
-
-@pytest.fixture
-def summarizer():
- """Fixture for OpenRouterEmailSummarizer."""
- return OpenRouterEmailSummarizer(api_key="test_api_key")
-
-@pytest.mark.asyncio
-async def test_summarizer_prepare_content(summarizer: OpenRouterEmailSummarizer, email_schema_fixture: EmailSchema):
- """Test that prepare_content formats the email correctly."""
- content = await summarizer.prepare_content(email_schema_fixture)
- assert "From: sender@example.com" in content
- assert "Subject: Test Subject" in content
- assert "Body:\nThis is a test email body." in content
-
-@pytest.mark.asyncio
-@patch('app.services.summarization.providers.openrouter.openrouter.AsyncOpenAI')
-async def test_backend_generate_summary_success(mock_async_openai, email_schema_fixture: EmailSchema):
- """Test successful summary generation with valid JSON response."""
- # Mock the OpenAI client
- mock_client = MagicMock()
- mock_async_openai.return_value = mock_client
-
- # Mock the response from chat.completions.create
- mock_completion = MagicMock()
- mock_completion.choices = [MagicMock()]
- mock_completion.choices[0].message.content = '{"summary": "This is a test summary.", "keywords": ["test", "email"]}'
-
- mock_client.chat.completions.create = AsyncMock(return_value=mock_completion)
-
- # Setup backend
- prompt_manager = OpenRouterPromptManager(prompt_version=PromptVersion.V1)
- backend = OpenRouterBackend(api_key="test_key", prompt_manager=prompt_manager)
-
- # Run the method
- summary, keywords = await backend.generate_summary("test content")
-
- # Assertions
- assert summary == "This is a test summary."
- assert keywords == ["test", "email"]
- mock_client.chat.completions.create.assert_called_once()
- call_args = mock_client.chat.completions.create.call_args
- assert call_args.kwargs['model'] is not None
- assert call_args.kwargs['messages'] is not None
-
-
-@pytest.mark.asyncio
-@patch('app.services.summarization.providers.openrouter.openrouter.AsyncOpenAI')
-async def test_backend_generate_summary_json_error(mock_async_openai):
- """Test summary generation with a non-JSON response."""
- # Mock the OpenAI client
- mock_client = MagicMock()
- mock_async_openai.return_value = mock_client
-
- # Mock a non-JSON response
- mock_completion = MagicMock()
- mock_completion.choices = [MagicMock()]
- mock_completion.choices[0].message.content = 'This is just a plain text summary.'
-
- mock_client.chat.completions.create = AsyncMock(return_value=mock_completion)
-
- # Setup backend
- prompt_manager = OpenRouterPromptManager(prompt_version=PromptVersion.V1)
- backend = OpenRouterBackend(api_key="test_key", prompt_manager=prompt_manager)
-
- # Run the method
- summary, keywords = await backend.generate_summary("test content")
-
- # Assertions for fallback behavior
- assert summary == "This is just a plain text summary."
- assert keywords == []
- mock_client.chat.completions.create.assert_called_once()
\ No newline at end of file
diff --git a/backend/app/utils/config.py b/backend/app/utils/config.py
index c6ef111f..ce9297b1 100644
--- a/backend/app/utils/config.py
+++ b/backend/app/utils/config.py
@@ -2,18 +2,18 @@
from pydantic_settings import BaseSettings
from functools import lru_cache
from enum import Enum
-from typing import Optional, List
-from pydantic import ConfigDict, model_validator
+from typing import Optional
+from pydantic import ConfigDict
class SummarizerProvider(str, Enum):
OPENAI = "openai" # Currently Best option
GOOGLE = "gemini"
- OPENROUTER = "openrouter"
+ # TODO: Add DeepSeek
LOCAL = "local"
@classmethod
def default(cls) -> "SummarizerProvider":
- return cls.OPENROUTER
+ return cls.OPENAI
class ProviderModel(str, Enum):
@@ -24,11 +24,10 @@ class ProviderModel(str, Enum):
# Gemini Models
GEMINI_2_FLASH_LITE = "gemini-2.0-flash-lite-preview-02-05"
GEMINI_2_FLASH = "gemini-2.0-flash-001"
+
+ # TODO: Check for updates to flash_lite!
- # OpenRouter -
- OR_GPT_4_1_NANO = "openai/gpt-4.1-nano"
- OR_MINISTRAL_8B = "mistralai/ministral-8b"
- OR_GEMINI_2_5_FLASH = "google/gemini-2.5-flash-preview"
+ # DeepSeek Models TODO: UNIMPLEMENTED
@classmethod
def default_for_provider(cls, provider: SummarizerProvider) -> "ProviderModel":
@@ -36,23 +35,11 @@ def default_for_provider(cls, provider: SummarizerProvider) -> "ProviderModel":
defaults = {
SummarizerProvider.OPENAI: cls.GPT_4O_MINI,
SummarizerProvider.GOOGLE: cls.GEMINI_2_FLASH_LITE,
- SummarizerProvider.OPENROUTER: cls.OR_GPT_4_1_NANO,
- SummarizerProvider.LOCAL: cls.GPT_4O_MINI, # Fallback to OpenAI
+ SummarizerProvider.LOCAL: cls.GEMINI_2_FLASH_LITE, # Fallback to OpenAI
}
- return defaults.get(provider, cls.GPT_4O_MINI)
- @classmethod
- def get_openrouter_fallbacks(cls) -> List["ProviderModel"]:
- """
- Get ordered list of OpenRouter models for fallback.
- The API limits the fallback list to a maximum of 3 models.
- """
- return [
- cls.OR_GPT_4_1_NANO,
- cls.OR_MINISTRAL_8B,
- cls.OR_GEMINI_2_5_FLASH,
- ]
+ return defaults.get(provider, cls.GPT_4O_MINI)
class PromptVersion(str, Enum):
V1 = "v1"
@@ -82,12 +69,11 @@ class Settings(BaseSettings):
oauth_callback_url: Optional[str] = None
# AI Providers
- openrouter_api_key: Optional[str] = None # No longer required
- openai_api_key: Optional[str] = None
- google_api_key: Optional[str] = None
- deepseek_api_key: Optional[str] = None
- gemini_api_key: Optional[str] = None
-
+ openai_api_key: str
+ google_api_key: str | None = None
+ deepseek_api_key: str | None = None
+ gemini_api_key: str | None = None
+
# Summarizer settings
summarizer_provider: SummarizerProvider = SummarizerProvider.default()
summarizer_model: ProviderModel = ProviderModel.default_for_provider(summarizer_provider)
@@ -95,17 +81,6 @@ class Settings(BaseSettings):
summarizer_prompt_version: PromptVersion = PromptVersion.latest()
model_config = ConfigDict(env_file=".env", use_enum_values=True)
-
- @model_validator(mode='after')
- def validate_api_keys(self) -> 'Settings':
- """Validate that the required API keys are present for the selected provider."""
- if self.summarizer_provider == SummarizerProvider.OPENROUTER and not self.openrouter_api_key:
- raise ValueError("OPENROUTER_API_KEY must be set when using the OpenRouter provider")
- if self.summarizer_provider == SummarizerProvider.OPENAI and not self.openai_api_key:
- raise ValueError("OPENAI_API_KEY must be set when using the OpenAI provider")
- if self.summarizer_provider == SummarizerProvider.GOOGLE and not self.google_api_key:
- raise ValueError("GOOGLE_API_KEY must be set when using the Google provider")
- return self
@lru_cache()
def get_settings() -> Settings:
diff --git a/backend/app/utils/helpers.py b/backend/app/utils/helpers.py
deleted file mode 100644
index 56cfc69e..00000000
--- a/backend/app/utils/helpers.py
+++ /dev/null
@@ -1,146 +0,0 @@
-"""
-Helper functions for Email Essence FastAPI application.
-
-This module contains shared utilities for logging and error handling that can be
-used across the application without creating circular dependencies.
-"""
-
-# Standard library imports
-import logging
-import os
-from typing import Optional
-
-# Third-party imports
-from fastapi import HTTPException, status
-
-
-def _get_logging_level_config() -> dict:
- """
- Get logging level configuration based on environment.
-
- Returns:
- dict: Mapping of module types to logging levels
- """
- environment = os.getenv('ENVIRONMENT', 'production').lower()
-
- # Logging level configuration by environment
- config = {
- 'development': {
- 'router': logging.DEBUG,
- 'service': logging.DEBUG,
- 'default': logging.DEBUG
- },
- 'production': {
- 'router': logging.INFO,
- 'service': logging.INFO,
- 'default': logging.INFO
- },
- 'testing': {
- 'router': logging.WARNING,
- 'service': logging.WARNING,
- 'default': logging.WARNING
- }
- }
-
- return config.get(environment, config['production'])
-
-
-def get_logger(name: str, module_type: str = 'default') -> logging.Logger:
- """
- Get a configured logger instance with appropriate level based on environment.
-
- Args:
- name: Logger name (typically __name__)
- module_type: Type of module ('router', 'service', or 'default')
-
- Returns:
- logging.Logger: Configured logger instance
- """
- logger = logging.getLogger(name)
-
- # Get appropriate logging level based on environment and module type
- level_config = _get_logging_level_config()
- logging_level = level_config.get(module_type, level_config['default'])
-
- logger.setLevel(logging_level)
-
- # Ensure the logger has a handler to output messages to the console
- if not logger.handlers:
- handler = logging.StreamHandler()
- formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
- handler.setFormatter(formatter)
- logger.addHandler(handler)
- # Prevent logs from being passed to the root logger to avoid duplicate output
- logger.propagate = False
-
- return logger
-
-
-def configure_module_logging(module_name: str, module_type: str = 'default') -> logging.Logger:
- """
- Configure logging for a specific module with environment-based levels.
-
- Args:
- module_name: Name of the module
- module_type: Type of module ('router', 'service', or 'default')
-
- Returns:
- logging.Logger: Configured logger instance
- """
- return get_logger(module_name, module_type)
-
-
-def standardize_error_response(
- error: Exception,
- operation: str,
- resource_id: str = None,
- user_id: str = None
-) -> HTTPException:
- """
- Standardize error responses across the application.
-
- Args:
- error: The original exception
- operation: Description of the operation that failed
- resource_id: Optional resource identifier
- user_id: Optional user identifier
-
- Returns:
- HTTPException: Standardized HTTP exception
- """
- error_msg = f"Failed to {operation}"
- if resource_id:
- error_msg += f" resource {resource_id}"
- if user_id:
- error_msg += f" for user {user_id}"
-
- # Log the original error for debugging
- logger = get_logger(__name__, 'default')
- logger.exception(f"{error_msg}: {str(error)}")
-
- return HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=error_msg
- )
-
-
-def log_operation(
- logger: logging.Logger,
- level: str,
- message: str,
- **kwargs
-) -> None:
- """
- Standardize logging across services.
-
- Args:
- logger: Logger instance to use
- level: Log level (debug, info, warning, error, exception)
- message: Log message
- **kwargs: Additional context for logging
- """
- log_method = getattr(logger, level.lower())
- if kwargs:
- log_method(message, extra=kwargs)
- else:
- log_method(message)
\ No newline at end of file
diff --git a/backend/main.py b/backend/main.py
index 2c312b9d..8806256f 100644
--- a/backend/main.py
+++ b/backend/main.py
@@ -1,33 +1,30 @@
# uvicorn main:app --reload
-
-# Standard library imports
import os
-from contextlib import asynccontextmanager
-import logging
-
-# Third-party imports
-from fastapi import FastAPI, HTTPException
+from fastapi import FastAPI, HTTPException, Depends, status
from fastapi.responses import FileResponse
from fastapi.middleware.cors import CORSMiddleware
-
-# Internal imports
-from app.routers import emails_router, summaries_router, auth_router, user_router
-from app.services.database.connection import DatabaseConnection
-
-# -------------------------------------------------------------------------
-# Logging Configuration
-# -------------------------------------------------------------------------
+from starlette.concurrency import run_in_threadpool
+from contextlib import asynccontextmanager
+import logging
# Configure logging
logging.basicConfig(level=logging.INFO)
# Reduce verbosity of httpx and httpcore
logging.getLogger("httpx").setLevel(logging.WARNING)
logging.getLogger("httpcore").setLevel(logging.WARNING)
-logger = logging.getLogger(__name__)
-# -------------------------------------------------------------------------
-# Database Lifecycle Management
-# -------------------------------------------------------------------------
+from app.routers import emails_router, summaries_router, auth_router, user_router
+from app.services.database.connection import DatabaseConnection
+from app.models import EmailSchema, SummarySchema, UserSchema
+
+
+# from app.models.user_model import User
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+ await startup_db_client()
+ yield
+ await shutdown_db_client()
async def startup_db_client():
"""
@@ -57,16 +54,6 @@ async def shutdown_db_client():
except Exception as e:
raise RuntimeError("Failed to close database connection") from e
-@asynccontextmanager
-async def lifespan(app: FastAPI):
- await startup_db_client()
- yield
- await shutdown_db_client()
-
-# -------------------------------------------------------------------------
-# FastAPI Application Setup
-# -------------------------------------------------------------------------
-
app = FastAPI(
title="Email Essence API",
description="API for the Email Essence application",
@@ -76,9 +63,6 @@ async def lifespan(app: FastAPI):
lifespan=lifespan
)
-# -------------------------------------------------------------------------
-# Middleware Configuration
-# -------------------------------------------------------------------------
# Configure CORS
app.add_middleware(
@@ -100,10 +84,15 @@ async def lifespan(app: FastAPI):
allow_headers=["*"], # Allows all headers
)
-# -------------------------------------------------------------------------
-# API Route Handlers
-# -------------------------------------------------------------------------
+logger = logging.getLogger(__name__)
+# Register routers
+app.include_router(auth_router, prefix="/auth", tags=["Auth"])
+app.include_router(user_router, prefix="/user", tags=["User"])
+app.include_router(emails_router, prefix="/emails", tags=["Emails"])
+app.include_router(summaries_router, prefix="/summaries", tags=["Summaries"])
+
+# Root route handler
@app.get("/", tags=["Root"])
async def root():
"""
@@ -125,7 +114,7 @@ async def root():
"status": "online"
}
-# Serve favicon.ico from root directory - only served to swagger UI
+# Serve favicon.ico from root directory
@app.get('/favicon.ico')
async def favicon():
root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -183,13 +172,4 @@ async def health_check():
health_status["components"]["google_api"] = f"error: {str(e)}"
health_status["status"] = "unhealthy"
- return health_status
-
-# -------------------------------------------------------------------------
-# Router Registration
-# -------------------------------------------------------------------------
-
-app.include_router(auth_router, prefix="/auth", tags=["Auth"])
-app.include_router(user_router, prefix="/user", tags=["User"])
-app.include_router(emails_router, prefix="/emails", tags=["Emails"])
-app.include_router(summaries_router, prefix="/summaries", tags=["Summaries"])
\ No newline at end of file
+ return health_status
\ No newline at end of file
diff --git a/frontend/src/authentication/authenticate.js b/frontend/src/authentication/authenticate.js
index 598b85b7..af9ce45c 100644
--- a/frontend/src/authentication/authenticate.js
+++ b/frontend/src/authentication/authenticate.js
@@ -5,13 +5,7 @@ export const authenticate = async () => {
window.location.href = `${baseUrl}/auth/login?redirect_uri=${redirect_uri}`;
};
-/**
- * Handles the OAuth callback after authentication.
- * Parses the auth state from the URL hash, verifies authentication, and stores the token.
- * Navigates to the error page if authentication fails.
- * @async
- * @returns {PromiseTheme