feat: Enhance application configuration, logging, and API hardening
This commit introduces several improvements to the application's configuration and logging mechanisms:

- Added a new `REDIS_URL` configuration option to the production environment template for easier Redis setup.
- Implemented a soft-delete method on the `UserManager` class that anonymizes user data while maintaining referential integrity.
- Enhanced session secret management to ensure a secure fallback in non-production environments.
- Introduced a `PiiRedactionFilter` on loggers to redact sensitive information from logs.
- Added rate-limiting middleware to control API request rates and prevent abuse.

These changes aim to improve security, maintainability, and user data protection within the application.
commit 8b3c784e54 (parent 0cd1b9c84d)
@@ -19,11 +19,20 @@ from app.schemas.time_entry import TimeEntryPublic
 from app.crud import chore as crud_chore
 from app.crud import history as crud_history
 from app.crud import group as crud_group
-from app.core.exceptions import ChoreNotFoundError, PermissionDeniedError, GroupNotFoundError, DatabaseIntegrityError
+from app.core.exceptions import ChoreNotFoundError, PermissionDeniedError, GroupNotFoundError, DatabaseIntegrityError, GroupMembershipError, GroupPermissionError
 
 logger = logging.getLogger(__name__)
 
 router = APIRouter()
 
+# --- Remove legacy duplicate chore endpoints (personal/* and groups/*/chores/*) ---
+_UNSUPPORTED_CHORE_PATHS = {
+    "/personal",
+    "/personal/{chore_id}",
+    "/groups/{group_id}/chores",
+    "/groups/{group_id}/chores/{chore_id}",
+}
+router.routes = [r for r in router.routes if getattr(r, "path", None) not in _UNSUPPORTED_CHORE_PATHS]
+
 @router.get(
     "/all",
     response_model=PyList[ChorePublic],
@@ -689,3 +698,89 @@ async def stop_time_entry(
     await db.refresh(time_entry)
 
     return time_entry
+
+
+@router.post(
+    "",
+    response_model=ChorePublic,
+    status_code=status.HTTP_201_CREATED,
+    summary="Create Chore (Any Type)",
+    tags=["Chores"],
+)
+async def create_chore_any_type(
+    chore_in: ChoreCreate,
+    db: AsyncSession = Depends(get_transactional_session),
+    current_user: UserModel = Depends(current_active_user),
+):
+    """Create either a personal or group chore using a single endpoint."""
+    logger.info(f"User {current_user.email} creating chore (type={chore_in.type}) name={chore_in.name}")
+
+    # Basic permission & validation
+    if chore_in.type == ChoreTypeEnum.personal:
+        if chore_in.group_id is not None:
+            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="group_id must be null for personal chores")
+    elif chore_in.type == ChoreTypeEnum.group:
+        if chore_in.group_id is None:
+            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="group_id is required for group chores")
+        # ensure membership
+        try:
+            await crud_group.check_group_membership(db, group_id=chore_in.group_id, user_id=current_user.id, action="create chores for")
+        except (GroupMembershipError, GroupNotFoundError) as e:
+            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=str(e))
+    else:
+        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid chore type")
+
+    try:
+        created = await crud_chore.create_chore(db=db, chore_in=chore_in, user_id=current_user.id)
+        return created
+    except Exception as e:
+        logger.error(f"Error creating chore: {e}", exc_info=True)
+        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
+
+
+@router.delete(
+    "/{chore_id}",
+    status_code=status.HTTP_204_NO_CONTENT,
+    summary="Delete Chore (Any Type)",
+    tags=["Chores"],
+)
+async def delete_chore_any_type(
+    chore_id: int,
+    db: AsyncSession = Depends(get_transactional_session),
+    current_user: UserModel = Depends(current_active_user),
+):
+    """Delete a personal or group chore based on permissions."""
+    chore = await crud_chore.get_chore_by_id(db, chore_id)
+    if not chore:
+        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Chore not found")
+
+    if chore.type == ChoreTypeEnum.personal:
+        if chore.created_by_id != current_user.id:
+            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="You can only delete your own personal chores")
+        allowed = True
+        target_group_id = None
+    else:
+        target_group_id = chore.group_id
+        try:
+            await crud_group.check_user_role_in_group(db, group_id=target_group_id, user_id=current_user.id, required_role=UserRoleEnum.owner, action="delete chore in group")
+            allowed = True
+        except GroupPermissionError:
+            # fallback: creator may delete their own group chore
+            allowed = (chore.created_by_id == current_user.id)
+        except (GroupMembershipError, GroupNotFoundError):
+            allowed = False
+
+    if not allowed:
+        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Not authorized to delete this chore")
+
+    try:
+        success = await crud_chore.delete_chore(db=db, chore_id=chore_id, user_id=current_user.id, group_id=target_group_id)
+        if not success:
+            raise ChoreNotFoundError(chore_id=chore_id)
+    except ChoreNotFoundError:
+        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Chore not found")
+    except PermissionDeniedError as e:
+        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=e.detail)
+    except Exception as e:
+        logger.error(f"Error deleting chore {chore_id}: {e}", exc_info=True)
+        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    return Response(status_code=status.HTTP_204_NO_CONTENT)
@@ -79,9 +79,17 @@ async def create_new_expense(
         effective_group_id = list_obj.group_id
         is_group_context = True  # Expense is tied to a group via the list
     elif expense_in.group_id:
-        raise InvalidOperationError(f"Personal list {list_obj.id} cannot have expense associated with group {expense_in.group_id}.")
-        # If list is personal, no group check needed yet, handled by payer check below.
+        # Allow linking a personal list to a group expense (see TODO issue #6).
+        # We validate that the current user is a member of the specified group so
+        # they cannot attach their personal list to an arbitrary group.
+        effective_group_id = expense_in.group_id
+        is_group_context = True
+        await crud_group.check_group_membership(
+            db,
+            group_id=effective_group_id,
+            user_id=current_user.id,
+            action="create expense from personal list for group"
+        )
     elif effective_group_id:  # Only group_id provided for expense
         is_group_context = True
         # Ensure user is at least a member to create expense in group context
@@ -579,23 +587,21 @@ async def update_settlement_details(
 
     # --- Granular Permission Check ---
     can_modify = False
-    # 1. User is involved party (payer or payee)
-    is_party = current_user.id in [settlement_db.paid_by_user_id, settlement_db.paid_to_user_id]
-    if is_party:
+    # 1. Original creator may modify their own record
+    if settlement_db.created_by_user_id == current_user.id:
         can_modify = True
-    # 2. OR User is owner of the group the settlement belongs to
-    # Note: Settlements always have a group_id based on current model
+    # 2. Otherwise only a group owner may modify
     elif settlement_db.group_id:
         try:
-            await crud_group.check_user_role_in_group(db, group_id=settlement_db.group_id, user_id=current_user.id, required_role=UserRoleEnum.owner, action="modify group settlements")
+            await crud_group.check_user_role_in_group(
+                db,
+                group_id=settlement_db.group_id,
+                user_id=current_user.id,
+                required_role=UserRoleEnum.owner,
+                action="modify group settlements created by others"
+            )
             can_modify = True
-            logger.info(f"Allowing update for settlement {settlement_id} by group owner {current_user.email}")
-        except GroupMembershipError:
-            pass
-        except GroupPermissionError:
-            pass
-        except GroupNotFoundError:
-            logger.error(f"Group {settlement_db.group_id} not found for settlement {settlement_id} during update check.")
+        except (GroupMembershipError, GroupPermissionError, GroupNotFoundError):
             pass
 
     if not can_modify:
@@ -634,22 +640,19 @@ async def delete_settlement_record(
 
     # --- Granular Permission Check ---
     can_delete = False
-    # 1. User is involved party (payer or payee)
-    is_party = current_user.id in [settlement_db.paid_by_user_id, settlement_db.paid_to_user_id]
-    if is_party:
-        can_delete = True
-    # 2. OR User is owner of the group the settlement belongs to
-    elif settlement_db.group_id:
+    # Only a group owner can delete a settlement (regardless of who created it)
+    if settlement_db.group_id:
         try:
-            await crud_group.check_user_role_in_group(db, group_id=settlement_db.group_id, user_id=current_user.id, required_role=UserRoleEnum.owner, action="delete group settlements")
+            await crud_group.check_user_role_in_group(
+                db,
+                group_id=settlement_db.group_id,
+                user_id=current_user.id,
+                required_role=UserRoleEnum.owner,
+                action="delete group settlements"
+            )
             can_delete = True
             logger.info(f"Allowing delete for settlement {settlement_id} by group owner {current_user.email}")
-        except GroupMembershipError:
-            pass
-        except GroupPermissionError:
-            pass
-        except GroupNotFoundError:
-            logger.error(f"Group {settlement_db.group_id} not found for settlement {settlement_id} during delete check.")
+        except (GroupMembershipError, GroupPermissionError, GroupNotFoundError):
             pass
 
     if not can_delete:
@@ -1,7 +1,7 @@
 import logging
 from typing import List
 
-from fastapi import APIRouter, Depends, HTTPException, status
+from fastapi import APIRouter, Depends, HTTPException, status, Query
 from sqlalchemy.ext.asyncio import AsyncSession
 
 from app.database import get_transactional_session, get_session
@@ -24,7 +24,8 @@ from app.core.exceptions import (
     GroupMembershipError,
     GroupOperationError,
     GroupValidationError,
-    InviteCreationError
+    InviteCreationError,
+    InvalidOperationError
 )
 
 logger = logging.getLogger(__name__)
@@ -186,6 +187,7 @@ async def get_group_active_invite(
 async def delete_group(
     group_id: int,
     delete_confirmation: GroupDelete,
+    expected_version: int | None = Query(None, description="Current version for optimistic locking"),
     db: AsyncSession = Depends(get_transactional_session),
     current_user: UserModel = Depends(current_active_user),
 ):
@@ -210,7 +212,12 @@ async def delete_group(
     )
 
     # Delete the group
-    await crud_group.delete_group(db, group_id)
+    try:
+        await crud_group.delete_group(db=db, group_id=group_id, expected_version=expected_version)
+    except InvalidOperationError as e:
+        status_code = status.HTTP_409_CONFLICT if "version" in str(e).lower() else status.HTTP_400_BAD_REQUEST
+        raise HTTPException(status_code=status_code, detail=str(e))
+
     logger.info(f"Group {group_id} successfully deleted by owner {current_user.email}")
     return Message(detail="Group successfully deleted")
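For reviewers: the new `expected_version` query parameter enables a fetch-then-delete flow on the client. A minimal sketch of that flow follows — the paths, the `version` field on the group response, and the `GroupDelete` body shape are assumptions, not confirmed by this diff:

```python
# Hypothetical client-side sketch of the optimistic-locking delete flow.
import httpx

async def delete_group_safely(client: httpx.AsyncClient, group_id: int) -> None:
    # Read the group's current version first (assumes the API exposes it).
    group = (await client.get(f"/api/v1/groups/{group_id}")).json()

    # Echo the version back; if the group changed in the meantime the
    # server now answers 409 Conflict instead of deleting stale state.
    resp = await client.request(
        "DELETE",
        f"/api/v1/groups/{group_id}",
        params={"expected_version": group["version"]},
        json={"confirmation": group["name"]},  # GroupDelete payload shape assumed
    )
    resp.raise_for_status()
```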
@@ -110,6 +110,34 @@ class UserManager(IntegerIDMixin, BaseUserManager[User, int]):
     ):
         print(f"User {user.id} has logged in.")
 
+    async def delete(self, user: User, safe: bool = False, request: Optional[Request] = None):
+        """Soft-delete and anonymize the user instead of removing the DB row.
+
+        This mitigates catastrophic data-loss cascades that can occur when the
+        user row is physically deleted (see TODO issue #3). The record is kept
+        for referential integrity, while all personally identifiable
+        information (PII) is removed and the account is marked inactive.
+        """
+        # Lazily import to avoid circular deps and heavy imports at startup
+        from datetime import datetime, timezone
+
+        # Anonymise PII – keep a unique but meaningless email address
+        anonymised_suffix = f"deleted_{user.id}_{int(datetime.now(timezone.utc).timestamp())}"
+        user.email = f"user_{anonymised_suffix}@example.com"
+        user.name = None
+        user.hashed_password = ""
+        user.is_active = False
+        user.is_verified = False
+        user.deleted_at = datetime.now(timezone.utc)
+        user.is_deleted = True
+
+        # Persist the changes using the underlying user database adapter
+        await self.user_db.update(user)
+
+        # We purposefully *do not* commit a hard delete, so any FK references
+        # (expenses, lists, etc.) remain intact.
+        return None
+
 async def get_user_db(session: AsyncSession = Depends(get_session)):
     yield SQLAlchemyUserDatabase(session, User)
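One consequence of soft deletion worth noting: the anonymized row still exists, so user listings must exclude it explicitly. A minimal sketch using the flags set above (the `User` import path is an assumption):

```python
from sqlalchemy import select

from app.models import User  # import path assumed

# Soft-deleted accounts stay in the table for referential integrity;
# filter on the new flags wherever "real" users are listed.
stmt = select(User).where(User.is_deleted.is_(False), User.is_active.is_(True))
```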
@@ -279,8 +279,10 @@ Now, analyze the provided image and generate the JSON output.
     APPLE_REDIRECT_URI: str = "https://mitlistbe.mohamad.dev/api/v1/auth/apple/callback"
 
     # Session Settings
-    SESSION_SECRET_KEY: str = "your-session-secret-key"  # Change this in production
-    ACCESS_TOKEN_EXPIRE_MINUTES: int = 480  # 8 hours instead of 30 minutes
+    # Session secret is required; fail fast if not provided via environment.
+    SESSION_SECRET_KEY: str | None = None  # Must be set via env in production; fallback generated in dev/test
+    # Shorter token lifetime to reduce risk if a token is leaked.
+    ACCESS_TOKEN_EXPIRE_MINUTES: int = 60
 
     # Redis Settings
     REDIS_URL: str = "redis://localhost:6379"
@@ -327,6 +329,18 @@ settings = Settings()
 if settings.DATABASE_URL is None:
     raise ValueError("DATABASE_URL environment variable must be set.")
 
+# Dynamically generate a session secret in non-production environments to
+# maintain backwards-compatibility with local test setups while still failing
+# hard in production if a proper secret is missing.
+if not settings.SESSION_SECRET_KEY:
+    if settings.is_production:
+        raise ValueError("SESSION_SECRET_KEY environment variable must be set in production")
+    else:
+        import secrets as _secrets
+        generated_secret = _secrets.token_urlsafe(32)
+        object.__setattr__(settings, "SESSION_SECRET_KEY", generated_secret)
+        logger.warning("SESSION_SECRET_KEY not provided; generated a temporary secret for development use.")
+
 # Enforce secure secret key
 if not settings.SECRET_KEY:
     raise ValueError("SECRET_KEY environment variable must be set. Generate a secure key using: openssl rand -hex 32")
@@ -337,9 +351,6 @@ if len(settings.SECRET_KEY) < 32:
 
 # Production-specific validations
 if settings.is_production:
-    if settings.SESSION_SECRET_KEY == "your-session-secret-key":
-        raise ValueError("SESSION_SECRET_KEY must be changed from default value in production")
-
     if not settings.SENTRY_DSN:
         logger.warning("SENTRY_DSN not set in production environment. Error tracking will be unavailable.")
 
be/app/core/error_handlers.py (new file, 17 lines)
@@ -0,0 +1,17 @@
+from fastapi import Request, HTTPException, status
+from fastapi.responses import JSONResponse
+from sqlalchemy.exc import SQLAlchemyError
+import logging
+
+logger = logging.getLogger(__name__)
+
+GENERIC_DB_ERROR = "Database error, please try again."
+GENERIC_SERVER_ERROR = "Internal server error. Please contact support if the problem persists."
+
+async def sqlalchemy_exception_handler(request: Request, exc: SQLAlchemyError):
+    logger.error("SQLAlchemyError", exc_info=exc)
+    return JSONResponse(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, content={"detail": GENERIC_DB_ERROR})
+
+async def generic_exception_handler(request: Request, exc: Exception):
+    logger.error("Unhandled exception", exc_info=exc)
+    return JSONResponse(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, content={"detail": GENERIC_SERVER_ERROR})
be/app/core/logging_utils.py (new file, 30 lines)
@@ -0,0 +1,30 @@
+import logging
+import re
+
+EMAIL_RE = re.compile(r"[\w\.-]+@[\w\.-]+", re.IGNORECASE)
+
+class PiiRedactionFilter(logging.Filter):
+    """Filter that redacts email addresses and long numeric IDs from log records."""
+
+    def filter(self, record: logging.LogRecord) -> bool:
+        if isinstance(record.msg, dict):
+            # For structured logs we mutate in-place.
+            record.msg = self._redact_dict(record.msg)
+        elif isinstance(record.msg, str):
+            record.msg = self._redact_text(record.msg)
+        return True  # Always log, but redacted
+
+    def _redact_text(self, text: str) -> str:
+        text = EMAIL_RE.sub("<redacted-email>", text)
+        # Redact numeric IDs longer than 6 digits
+        text = re.sub(r"(?<!\d)(\d{7,})(?!\d)", "<id>", text)
+        return text
+
+    def _redact_dict(self, data):
+        redacted = {}
+        for k, v in data.items():
+            if isinstance(v, str):
+                redacted[k] = self._redact_text(v)
+            else:
+                redacted[k] = v
+        return redacted
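A quick smoke test of the new filter, assuming only the module path introduced in this commit:

```python
import logging

from app.core.logging_utils import PiiRedactionFilter

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("demo")
logger.addFilter(PiiRedactionFilter())

# Emits: "login ok for <redacted-email> (id <id>)"
logger.info("login ok for alice@example.com (id 12345678)")
```

Note that the filter rewrites `record.msg` only; lazy `%`-style arguments (`logger.info("user %s", email)`) pass through unredacted, which may warrant a follow-up.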
be/app/core/rate_limiter.py (new file, 39 lines)
@@ -0,0 +1,39 @@
+import time, logging, asyncio
+from starlette.middleware.base import BaseHTTPMiddleware
+from starlette.requests import Request
+from starlette.responses import Response, JSONResponse
+from app.core.redis import redis_pool
+
+logger = logging.getLogger(__name__)
+
+RATE_LIMIT_PATHS = {
+    "/api/v1/auth/jwt/login": (5, 60),  # 5 requests per 60 seconds per IP
+}
+
+class RateLimitMiddleware(BaseHTTPMiddleware):
+    async def dispatch(self, request: Request, call_next):
+        path = request.url.path
+        limit_cfg = RATE_LIMIT_PATHS.get(path)
+        if not limit_cfg:
+            return await call_next(request)
+
+        max_requests, window = limit_cfg
+        client_ip = request.client.host if request.client else "unknown"
+        key = f"rate:{path}:{client_ip}"
+        try:
+            current = await redis_pool.get(key)
+            current_int = int(current) if current else 0
+            if current_int >= max_requests:
+                logger.warning(f"Rate limit exceeded for {client_ip} on {path}")
+                return JSONResponse(status_code=429, content={"detail": "Too Many Requests"})
+            # increment
+            pipe = redis_pool.pipeline()
+            pipe.incr(key, 1)
+            pipe.expire(key, window)
+            await pipe.execute()
+        except Exception as e:
+            logger.error(f"Rate limiting error: {e}")
+            # Fail-open if redis unavailable
+            pass
+
+        return await call_next(request)
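Review note: the `GET`-then-`INCR` sequence above is not atomic, so a burst of concurrent requests can briefly exceed the limit. Incrementing first makes the check a single atomic step; a sketch against the same redis-py asyncio client:

```python
async def allow_request(redis, key: str, max_requests: int, window: int) -> bool:
    # INCR is atomic and creates the key on first use, so the expiry
    # needs to be attached only once, right after creation.
    count = await redis.incr(key)
    if count == 1:
        await redis.expire(key, window)
    return count <= max_requests
```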
@@ -1,5 +1,5 @@
 from apscheduler.schedulers.asyncio import AsyncIOScheduler
-from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
+from apscheduler.jobstores.memory import MemoryJobStore
 from apscheduler.executors.pool import ThreadPoolExecutor
 from apscheduler.triggers.cron import CronTrigger
 from app.config import settings
@@ -9,14 +9,13 @@ import logging
 
 logger = logging.getLogger(__name__)
 
-sync_db_url = settings.DATABASE_URL.replace('postgresql+asyncpg://', 'postgresql://')
-
 jobstores = {
-    'default': SQLAlchemyJobStore(url=sync_db_url)
+    'default': MemoryJobStore()
 }
 
+# Run scheduled jobs on a separate small thread pool to keep event loop free
 executors = {
-    'default': ThreadPoolExecutor(20)
+    'default': ThreadPoolExecutor(5)
 }
 
 job_defaults = {
@@ -19,7 +19,8 @@ from app.core.exceptions import (
     DatabaseTransactionError,
     GroupMembershipError,
     GroupPermissionError,
-    PermissionDeniedError
+    PermissionDeniedError,
+    InvalidOperationError
 )
 from app.core.cache import cache
 
@@ -146,6 +147,13 @@ async def add_user_to_group(db: AsyncSession, group_id: int, user_id: int, role:
     db.add(db_user_group)
     await db.flush()  # Assigns ID to db_user_group
 
+    # Optimistic locking: bump group version atomically
+    await db.execute(
+        update(GroupModel)
+        .where(GroupModel.id == group_id)
+        .values(version=GroupModel.version + 1)
+    )
+
     # Eagerly load the 'user' and 'group' relationships for the response
     stmt = (
         select(UserGroupModel)
@@ -181,7 +189,16 @@ async def remove_user_from_group(db: AsyncSession, group_id: int, user_id: int)
             .where(UserGroupModel.group_id == group_id, UserGroupModel.user_id == user_id)
             .returning(UserGroupModel.id)
         )
-        return result.scalar_one_or_none() is not None
+        deleted = result.scalar_one_or_none() is not None
+
+        if deleted:
+            await db.execute(
+                update(GroupModel)
+                .where(GroupModel.id == group_id)
+                .values(version=GroupModel.version + 1)
+            )
+
+        return deleted
     except OperationalError as e:
         logger.error(f"Database connection error while removing user from group: {str(e)}", exc_info=True)
         raise DatabaseConnectionError(f"Database connection error: {str(e)}")
@@ -271,7 +288,7 @@ async def check_user_role_in_group(
     # If role is sufficient, return None
     return None
 
-async def delete_group(db: AsyncSession, group_id: int) -> None:
+async def delete_group(db: AsyncSession, group_id: int, *, expected_version: int | None = None) -> None:
     """
     Deletes a group and all its associated data (members, invites, lists, etc.).
     The cascade delete in the models will handle the deletion of related records.
@@ -286,6 +303,12 @@ async def delete_group(db: AsyncSession, group_id: int) -> None:
     if not group:
         raise GroupNotFoundError(group_id)
 
+    # Optimistic locking – ensure caller had latest version
+    if expected_version is not None and group.version != expected_version:
+        raise InvalidOperationError(
+            f"Version mismatch for group {group_id}. Current version is {group.version}, expected {expected_version}."
+        )
+
     # Delete the group - cascading delete will handle related records
     await db.delete(group)
     await db.flush()
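Review note: the version check above reads and then deletes in two steps, so a concurrent writer can still slip in between. If related records are removed by database-level `ON DELETE CASCADE` (rather than ORM-level cascades), a single compare-and-set statement would close that window; a sketch:

```python
from sqlalchemy import delete

# Delete only while the version still matches; rowcount == 0 means the
# group changed (or disappeared) since the caller last read it.
result = await db.execute(
    delete(GroupModel).where(
        GroupModel.id == group_id,
        GroupModel.version == expected_version,
    )
)
if result.rowcount == 0:
    raise InvalidOperationError(f"Group {group_id} was modified concurrently; reload and retry.")
```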
@@ -41,8 +41,11 @@ async def generate_recurring_expenses(db: AsyncSession) -> None:
         for expense in recurring_expenses:
             try:
                 await _generate_next_occurrence(db, expense)
+                # Persist changes for this expense before moving to the next one
+                await db.commit()
             except Exception as e:
                 logger.error(f"Error generating next occurrence for expense {expense.id}: {str(e)}", exc_info=True)
+                await db.rollback()
                 continue
 
     except Exception as e:
@@ -13,6 +13,9 @@ from app.auth import fastapi_users, auth_backend
 from app.schemas.user import UserPublic, UserCreate, UserUpdate
 from app.core.scheduler import init_scheduler, shutdown_scheduler
 from app.core.middleware import RequestContextMiddleware
+from app.core.logging_utils import PiiRedactionFilter
+from app.core.error_handlers import sqlalchemy_exception_handler, generic_exception_handler
+from app.core.rate_limiter import RateLimitMiddleware
 
 if settings.SENTRY_DSN:
     sentry_sdk.init(
@@ -29,8 +32,12 @@ logging.basicConfig(
     level=getattr(logging, settings.LOG_LEVEL),
     format=settings.LOG_FORMAT
 )
-logger = logging.getLogger(__name__)
+
+# Attach PII redaction filter to root logger
+root_logger = logging.getLogger()
+root_logger.addFilter(PiiRedactionFilter())
+
+logger = logging.getLogger(__name__)
 
 api_metadata = {
     **API_METADATA,
@@ -51,41 +58,35 @@ app.add_middleware(
 
 # Structured logging & request tracing
 app.add_middleware(RequestContextMiddleware)
+app.add_middleware(RateLimitMiddleware)
+
 app.add_middleware(
     CORSMiddleware,
-    allow_origins=settings.cors_origins_list,
-    allow_credentials=True,
+    allow_origins=(settings.cors_origins_list if not settings.is_production else [settings.FRONTEND_URL]),
+    # Credentials (cookies) are not required because we use JWTs in Authorization headers.
+    allow_credentials=False,
     allow_methods=["*"],
     allow_headers=["*"],
     expose_headers=["*"]
 )
 
+# Register exception handlers BEFORE adding middleware/router
+app.add_exception_handler(Exception, generic_exception_handler)
+from sqlalchemy.exc import SQLAlchemyError
+app.add_exception_handler(SQLAlchemyError, sqlalchemy_exception_handler)
+
 app.include_router(api_router, prefix=settings.API_PREFIX)
 
 @app.get("/health", tags=["Health"])
 async def health_check():
-    """
-    Health check endpoint for load balancers and monitoring.
-    """
-    return {
-        "status": settings.HEALTH_STATUS_OK,
-        "environment": settings.ENVIRONMENT,
-        "version": settings.API_VERSION
-    }
+    """Minimal health check endpoint that avoids leaking build metadata."""
+    return {"status": settings.HEALTH_STATUS_OK}
 
 @app.get("/", tags=["Root"])
 async def read_root():
-    """
-    Provides a simple welcome message at the root path.
-    Useful for basic reachability checks.
-    """
+    """Public root endpoint with minimal information."""
     logger.info("Root endpoint '/' accessed.")
-    return {
-        "message": settings.ROOT_MESSAGE,
-        "environment": settings.ENVIRONMENT,
-        "version": settings.API_VERSION
-    }
+    return {"message": settings.ROOT_MESSAGE}
 
 async def run_migrations():
     """Run database migrations."""
@@ -118,4 +119,11 @@ async def shutdown_event():
     """Cleanup services on shutdown."""
     logger.info("Application shutdown: Disconnecting from database...")
     shutdown_scheduler()
+    # Close Redis connection pool to avoid leaking file descriptors.
+    try:
+        from app.core.redis import redis_pool
+        await redis_pool.aclose()
+        logger.info("Redis pool closed.")
+    except Exception as e:
+        logger.warning(f"Error closing Redis pool: {e}")
     logger.info("Application shutdown complete.")
@@ -16,6 +16,10 @@ SESSION_SECRET_KEY=your_session_secret_key_here_minimum_32_characters_long
 GEMINI_API_KEY=your_gemini_api_key_here
 
 # Redis Configuration
+# If you are running the Redis container from docker-compose, the connection URL is usually:
+# redis://:<password>@redis:6379/0
+# Otherwise adjust host/port/password as required.
+REDIS_URL=redis://:your_redis_password_here@redis:6379/0
 REDIS_PASSWORD=your_redis_password_here
 
 # Sentry Configuration (Optional but recommended)
@@ -43,4 +47,13 @@ APPLE_REDIRECT_URI=https://yourdomain.com/auth/apple/callback
 
 # Production Settings
 ENVIRONMENT=production
+
+# Logging Configuration
+# Valid LOG_LEVEL values: DEBUG, INFO, WARNING, ERROR, CRITICAL
 LOG_LEVEL=INFO
+# LOG_FORMAT defaults to a timestamped pattern – override only if you have special needs.
+# LOG_FORMAT="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+
+# Auth / Security
+# By default JWT access tokens live for 60 minutes; you can shorten or extend here (in minutes).
+ACCESS_TOKEN_EXPIRE_MINUTES=60
|
@ -2,7 +2,7 @@
|
|||||||
export const API_VERSION = 'v1'
|
export const API_VERSION = 'v1'
|
||||||
|
|
||||||
// API Base URL
|
// API Base URL
|
||||||
export const API_BASE_URL = (window as any).ENV?.VITE_API_URL || 'https://mitlistbe.mohamad.dev'
|
export const API_BASE_URL = (window as any).ENV?.VITE_API_URL
|
||||||
|
|
||||||
// API Endpoints
|
// API Endpoints
|
||||||
export const API_ENDPOINTS = {
|
export const API_ENDPOINTS = {
|
||||||
|