diff --git a/docs/user-guide/configuration/environment-variables.md b/docs/user-guide/configuration/environment-variables.md index 53fd3ca..bc09576 100644 --- a/docs/user-guide/configuration/environment-variables.md +++ b/docs/user-guide/configuration/environment-variables.md @@ -436,21 +436,6 @@ class SecurityHeadersMiddleware(BaseHTTPMiddleware): ## Logging Configuration -### Basic Logging Setup - -Configure logging in `src/app/core/logger.py`: - -```python -import logging -from logging.handlers import RotatingFileHandler - -# Set log level -LOGGING_LEVEL = logging.INFO - -# Configure file rotation -file_handler = RotatingFileHandler("logs/app.log", maxBytes=10485760, backupCount=5) # 10MB # Keep 5 backup files -``` - ### Structured Logging Use structured logging for better observability: @@ -469,15 +454,6 @@ structlog.configure( ) ``` -### Log Levels by Environment - -```python -# Environment-specific log levels -LOG_LEVELS = {"local": logging.DEBUG, "staging": logging.INFO, "production": logging.WARNING} - -LOGGING_LEVEL = LOG_LEVELS.get(settings.ENVIRONMENT, logging.INFO) -``` - ## Environment-Specific Configurations ### Development (.env.development) diff --git a/docs/user-guide/development.md b/docs/user-guide/development.md index 84fb187..fbe4d75 100644 --- a/docs/user-guide/development.md +++ b/docs/user-guide/development.md @@ -17,36 +17,37 @@ from sqlalchemy import String, ForeignKey from sqlalchemy.orm import Mapped, mapped_column, relationship from ..core.db.database import Base - - ``` + class Category(Base): - __tablename__ = "category" - - id: Mapped[int] = mapped_column( - "id", - autoincrement=True, - nullable=False, - unique=True, - primary_key=True, - init=False, - ) +__tablename__ = "category" - name: Mapped[str] = mapped_column(String(50)) - description: Mapped[str | None] = mapped_column(String(255), default=None) +``` +id: Mapped[int] = mapped_column( + "id", + autoincrement=True, + nullable=False, + unique=True, + primary_key=True, + 
init=False, +) +name: Mapped[str] = mapped_column(String(50)) +description: Mapped[str | None] = mapped_column(String(255), default=None) +``` class Post(Base): - __tablename__ = "post" +__tablename__ = "post" - id: Mapped[int] = mapped_column(primary_key=True) - title: Mapped[str] = mapped_column(String(100)) +``` +id: Mapped[int] = mapped_column(primary_key=True) +title: Mapped[str] = mapped_column(String(100)) - category_id: Mapped[int | None] = mapped_column( - ForeignKey("category.id"), - index=True, - default=None - ) +category_id: Mapped[int | None] = mapped_column( + ForeignKey("category.id"), + index=True, + default=None +) ``` #### 2. Create Pydantic Schemas @@ -70,14 +71,14 @@ class CategoryCreate(CategoryBase): class CategoryRead(CategoryBase): model_config = ConfigDict(from_attributes=True) - + id: int created_at: datetime class CategoryUpdate(BaseModel): model_config = ConfigDict(extra="forbid") - + name: Annotated[str | None, Field(min_length=1, max_length=50, default=None)] description: Annotated[str | None, Field(max_length=255, default=None)] @@ -88,7 +89,7 @@ class CategoryUpdateInternal(CategoryUpdate): class CategoryDelete(BaseModel): model_config = ConfigDict(extra="forbid") - + is_deleted: bool deleted_at: datetime ``` @@ -115,6 +116,7 @@ Add your new model to `src/app/models/__init__.py`: from .category import Category from .user import User from .post import Post + # ... 
other imports ``` @@ -186,12 +188,7 @@ async def read_category( category_id: int, db: Annotated[AsyncSession, Depends(async_get_db)], ): - db_category = await crud_categories.get( - db=db, - schema_to_select=CategoryRead, - id=category_id, - is_deleted=False - ) + db_category = await crud_categories.get(db=db, schema_to_select=CategoryRead, id=category_id, is_deleted=False) if not db_category: raise NotFoundException("Category not found") @@ -240,6 +237,7 @@ Add your router to `src/app/api/v1/__init__.py`: ```python from fastapi import APIRouter from .categories import router as categories_router + # ... other imports router = APIRouter() @@ -260,14 +258,14 @@ class CustomHeaderMiddleware(BaseHTTPMiddleware): async def dispatch(self, request: Request, call_next): # Pre-processing start_time = time.time() - + # Process request response = await call_next(request) - + # Post-processing process_time = time.time() - start_time response.headers["X-Process-Time"] = str(process_time) - + return response ``` @@ -306,19 +304,17 @@ TEST_DATABASE_URL = "postgresql+asyncpg://test_user:test_pass@localhost:5432/tes # Create test engine test_engine = create_async_engine(TEST_DATABASE_URL, echo=True) -TestSessionLocal = sessionmaker( - test_engine, class_=AsyncSession, expire_on_commit=False -) +TestSessionLocal = sessionmaker(test_engine, class_=AsyncSession, expire_on_commit=False) @pytest_asyncio.fixture async def async_session(): async with test_engine.begin() as conn: await conn.run_sync(Base.metadata.create_all) - + async with TestSessionLocal() as session: yield session - + async with test_engine.begin() as conn: await conn.run_sync(Base.metadata.drop_all) @@ -327,12 +323,12 @@ async def async_session(): async def async_client(async_session): def get_test_db(): return async_session - + app.dependency_overrides[async_get_db] = get_test_db - + async with AsyncClient(app=app, base_url="http://test") as client: yield client - + app.dependency_overrides.clear() ``` @@ -348,12 
+344,7 @@ from src.app.models.user import User @pytest_asyncio.fixture async def test_user(async_session): - user = User( - name="Test User", - username="testuser", - email="test@example.com", - hashed_password="hashed_password" - ) + user = User(name="Test User", username="testuser", email="test@example.com", hashed_password="hashed_password") async_session.add(user) await async_session.commit() await async_session.refresh(user) @@ -375,16 +366,11 @@ from httpx import AsyncClient async def test_create_user(async_client: AsyncClient): - user_data = { - "name": "New User", - "username": "newuser", - "email": "new@example.com", - "password": "SecurePass123!" - } - + user_data = {"name": "New User", "username": "newuser", "email": "new@example.com", "password": "SecurePass123!"} + response = await async_client.post("/api/v1/users", json=user_data) assert response.status_code == 201 - + data = response.json() assert data["name"] == "New User" assert data["username"] == "newuser" @@ -394,7 +380,7 @@ async def test_create_user(async_client: AsyncClient): async def test_read_users(async_client: AsyncClient): response = await async_client.get("/api/v1/users") assert response.status_code == 200 - + data = response.json() assert "data" in data assert "total_count" in data @@ -410,23 +396,15 @@ from src.app.schemas.user import UserCreate async def test_crud_create_user(async_session): - user_data = UserCreate( - name="CRUD User", - username="cruduser", - email="crud@example.com", - password="password123" - ) - + user_data = UserCreate(name="CRUD User", username="cruduser", email="crud@example.com", password="password123") + user = await crud_users.create(db=async_session, object=user_data) assert user["name"] == "CRUD User" assert user["username"] == "cruduser" async def test_crud_get_user(async_session, test_user): - retrieved_user = await crud_users.get( - db=async_session, - id=test_user.id - ) + retrieved_user = await crud_users.get(db=async_session, id=test_user.id) 
assert retrieved_user["name"] == test_user.name ``` @@ -460,44 +438,22 @@ Create environment-specific settings: class LocalSettings(Settings): ENVIRONMENT: str = "local" DEBUG: bool = True - + + class ProductionSettings(Settings): ENVIRONMENT: str = "production" DEBUG: bool = False # Production-specific settings + def get_settings(): env = os.getenv("ENVIRONMENT", "local") if env == "production": return ProductionSettings() return LocalSettings() -settings = get_settings() -``` - -### Custom Logging -Configure logging in `src/app/core/config.py`: - -```python -import logging -from pythonjsonlogger import jsonlogger - -def setup_logging(): - # JSON logging for production - if settings.ENVIRONMENT == "production": - logHandler = logging.StreamHandler() - formatter = jsonlogger.JsonFormatter() - logHandler.setFormatter(formatter) - logger = logging.getLogger() - logger.addHandler(logHandler) - logger.setLevel(logging.INFO) - else: - # Simple logging for development - logging.basicConfig( - level=logging.DEBUG, - format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" - ) +settings = get_settings() ``` ## Opting Out of Services @@ -505,7 +461,7 @@ def setup_logging(): ### Disabling Redis Caching 1. Remove cache decorators from endpoints -2. Update dependencies in `src/app/core/config.py`: +1. Update dependencies in `src/app/core/config.py`: ```python class Settings(BaseSettings): @@ -520,9 +476,9 @@ class Settings(BaseSettings): ### Disabling Background Tasks (ARQ) 1. Remove ARQ from `pyproject.toml` dependencies -2. Remove worker configuration from `docker-compose.yml` -3. Delete `src/app/core/worker/` directory -4. Remove task-related endpoints +1. Remove worker configuration from `docker-compose.yml` +1. Delete `src/app/core/worker/` directory +1. 
Remove task-related endpoints ### Disabling Rate Limiting @@ -530,18 +486,18 @@ class Settings(BaseSettings): ```python # Remove this dependency -dependencies=[Depends(rate_limiter_dependency)] +dependencies = [Depends(rate_limiter_dependency)] ``` 2. Remove rate limiting models and schemas -3. Update database migrations to remove rate limit tables +1. Update database migrations to remove rate limit tables ### Disabling Authentication 1. Remove JWT dependencies from protected endpoints -2. Remove user-related models and endpoints -3. Update database to remove user tables -4. Remove authentication middleware +1. Remove user-related models and endpoints +1. Update database to remove user tables +1. Remove authentication middleware ### Minimal FastAPI Setup @@ -551,16 +507,14 @@ For a minimal setup with just basic FastAPI: # src/app/main.py (minimal version) from fastapi import FastAPI -app = FastAPI( - title="Minimal API", - description="Basic FastAPI application", - version="1.0.0" -) +app = FastAPI(title="Minimal API", description="Basic FastAPI application", version="1.0.0") + @app.get("/") async def root(): return {"message": "Hello World"} + @app.get("/health") async def health_check(): return {"status": "healthy"} @@ -630,26 +584,28 @@ async def health_check(): ## Database Migrations !!! warning "Important Setup for Docker Users" - If you're using the database in Docker, you need to expose the port to run migrations. Change this in `docker-compose.yml`: - - ```yaml - db: - image: postgres:13 - env_file: - - ./src/.env - volumes: - - postgres-data:/var/lib/postgresql/data - # -------- replace with comment to run migrations with docker -------- - ports: - - 5432:5432 - # expose: - # - "5432" - ``` +If you're using the database in Docker, you need to expose the port to run migrations. 
Change this in `docker-compose.yml`: + +```` +```yaml +db: + image: postgres:13 + env_file: + - ./src/.env + volumes: + - postgres-data:/var/lib/postgresql/data + # -------- replace with comment to run migrations with docker -------- + ports: + - 5432:5432 + # expose: + # - "5432" +``` +```` ### Creating Migrations !!! warning "Model Import Requirement" - To create tables if you haven't created endpoints yet, ensure you import the models in `src/app/models/__init__.py`. This step is crucial for Alembic to detect new tables. +To create tables if you haven't created endpoints yet, ensure you import the models in `src/app/models/__init__.py`. This step is crucial for Alembic to detect new tables. While in the `src` folder, run Alembic migrations: @@ -662,16 +618,16 @@ uv run alembic upgrade head ``` !!! note "Without uv" - If you don't have uv, run `pip install alembic` first, then use `alembic` commands directly. +If you don't have uv, run `pip install alembic` first, then use `alembic` commands directly. ### Migration Workflow 1. **Make Model Changes** - Modify your SQLAlchemy models -2. **Import Models** - Ensure models are imported in `src/app/models/__init__.py` -3. **Generate Migration** - Run `alembic revision --autogenerate` -4. **Review Migration** - Check the generated migration file in `src/migrations/versions/` -5. **Apply Migration** - Run `alembic upgrade head` -6. **Test Changes** - Verify your changes work as expected +1. **Import Models** - Ensure models are imported in `src/app/models/__init__.py` +1. **Generate Migration** - Run `alembic revision --autogenerate` +1. **Review Migration** - Check the generated migration file in `src/migrations/versions/` +1. **Apply Migration** - Run `alembic upgrade head` +1. 
**Test Changes** - Verify your changes work as expected ### Common Migration Tasks @@ -684,9 +640,10 @@ from sqlalchemy.orm import Mapped, mapped_column from app.core.db.database import Base + class Category(Base): __tablename__ = "categories" - + id: Mapped[int] = mapped_column(primary_key=True) name: Mapped[str] = mapped_column(String(50)) description: Mapped[str] = mapped_column(String(255), nullable=True) @@ -726,4 +683,4 @@ uv run alembic revision --autogenerate -m "Add bio field to users" uv run alembic upgrade head ``` -This guide provides the foundation for extending and customizing the FastAPI boilerplate. For specific implementation details, refer to the existing code examples throughout the boilerplate. \ No newline at end of file +This guide provides the foundation for extending and customizing the FastAPI boilerplate. For specific implementation details, refer to the existing code examples throughout the boilerplate. diff --git a/docs/user-guide/production.md b/docs/user-guide/production.md index 3dfdd8b..5006110 100644 --- a/docs/user-guide/production.md +++ b/docs/user-guide/production.md @@ -293,18 +293,18 @@ http { location / { limit_req zone=api burst=20 nodelay; - + proxy_pass http://fastapi_backend; proxy_set_header Host $host; proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; - + # Timeouts proxy_connect_timeout 60s; proxy_send_timeout 60s; proxy_read_timeout 60s; - + # Buffer settings proxy_buffering on; proxy_buffer_size 8k; @@ -354,7 +354,7 @@ server { } ``` -### Load Balancing Multiple Servers +### Load Balancing Multiple Servers For horizontal scaling with multiple FastAPI instances: @@ -389,7 +389,7 @@ upstream fastapi_backend { server web1:8000 weight=3; server web2:8000 weight=2; server web3:8000 weight=1; - + # Health checks keepalive 32; } @@ -404,7 +404,7 @@ server { proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For 
$proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; - + # Connection settings for load balancing proxy_http_version 1.1; proxy_set_header Connection ""; @@ -485,29 +485,6 @@ save 60 10000 ### Application Optimization -#### Logging Configuration - -```python -# src/app/core/config.py -import logging -from pythonjsonlogger import jsonlogger - -def setup_production_logging(): - logHandler = logging.StreamHandler() - formatter = jsonlogger.JsonFormatter( - "%(asctime)s %(name)s %(levelname)s %(message)s" - ) - logHandler.setFormatter(formatter) - - logger = logging.getLogger() - logger.addHandler(logHandler) - logger.setLevel(logging.INFO) - - # Reduce noise from third-party libraries - logging.getLogger("uvicorn.access").setLevel(logging.WARNING) - logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING) -``` - #### Performance Monitoring ```python @@ -516,19 +493,20 @@ import time from fastapi import Request from starlette.middleware.base import BaseHTTPMiddleware + class MonitoringMiddleware(BaseHTTPMiddleware): async def dispatch(self, request: Request, call_next): start_time = time.time() - + response = await call_next(request) - + process_time = time.time() - start_time response.headers["X-Process-Time"] = str(process_time) - + # Log slow requests if process_time > 1.0: logger.warning(f"Slow request: {request.method} {request.url} - {process_time:.2f}s") - + return response ``` @@ -541,14 +519,14 @@ class MonitoringMiddleware(BaseHTTPMiddleware): class ProductionSettings(Settings): # Hide docs in production ENVIRONMENT: str = "production" - + # Security settings SECRET_KEY: str = Field(..., min_length=32) ALLOWED_HOSTS: list[str] = ["your-domain.com", "api.your-domain.com"] - + # Database security POSTGRES_PASSWORD: str = Field(..., min_length=16) - + class Config: case_sensitive = True ``` @@ -578,14 +556,14 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - + - name: Build and push Docker image env: DOCKER_REGISTRY: 
your-registry.com run: | docker build -t $DOCKER_REGISTRY/fastapi-app:latest . docker push $DOCKER_REGISTRY/fastapi-app:latest - + - name: Deploy to production run: | # Your deployment commands @@ -625,12 +603,13 @@ from fastapi import APIRouter router = APIRouter() + @router.get("/metrics") async def get_metrics(): return { "cpu_percent": psutil.cpu_percent(), "memory_percent": psutil.virtual_memory().percent, - "disk_usage": psutil.disk_usage('/').percent + "disk_usage": psutil.disk_usage("/").percent, } ``` @@ -670,4 +649,4 @@ find $BACKUP_DIR -name "backup_*.sql.gz" -mtime +7 -delete - Optimize Docker image layers - Configure proper resource limits -This production guide provides a solid foundation for deploying the FastAPI boilerplate to production environments with proper performance, security, and reliability configurations. \ No newline at end of file +This production guide provides a solid foundation for deploying the FastAPI boilerplate to production environments with proper performance, security, and reliability configurations. 
diff --git a/logs/.gitkeep b/logs/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/pyproject.toml b/pyproject.toml index f5b2692..accfb1d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,6 +33,7 @@ dependencies = [ "gunicorn>=23.0.0", "ruff>=0.11.13", "mypy>=1.16.0", + "python-json-logger>=4.0.0", ] [project.optional-dependencies] diff --git a/scripts/local_with_uvicorn/.env.example b/scripts/local_with_uvicorn/.env.example index 9f3e5f4..101ac89 100644 --- a/scripts/local_with_uvicorn/.env.example +++ b/scripts/local_with_uvicorn/.env.example @@ -22,6 +22,11 @@ CONTACT_NAME="Me" CONTACT_EMAIL="my.email@example.com" LICENSE_NAME="MIT" +# ------------- logging settings ------------- +LOG_LEVEL="INFO" +LOG_FORMAT_AS_JSON=false +LOG_TO_FILE=false + # ------------- database ------------- POSTGRES_USER="postgres" POSTGRES_PASSWORD=1234 diff --git a/scripts/local_with_uvicorn/docker-compose.yml b/scripts/local_with_uvicorn/docker-compose.yml index e41c2c9..d2a957a 100644 --- a/scripts/local_with_uvicorn/docker-compose.yml +++ b/scripts/local_with_uvicorn/docker-compose.yml @@ -14,6 +14,7 @@ services: volumes: - ./src/app:/code/app - .env:/code/.env + - ./logs:/code/logs worker: build: diff --git a/src/app/__init__.py b/src/app/__init__.py index e69de29..f9e8b1e 100644 --- a/src/app/__init__.py +++ b/src/app/__init__.py @@ -0,0 +1,13 @@ +import os + +# Load environment variables from the .env file at project root. +# This makes variables available via os.environ before any other imports or configuration. +# If the .env file does not exist, load_dotenv does nothing and no error is raised. +# Later, in the application's config.py, settings will be read from os.environ as needed. 
+from dotenv import load_dotenv + +from .core.logger import setup_logging + +env_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", ".env") +load_dotenv(env_path, encoding="utf-8") +setup_logging() diff --git a/src/app/api/dependencies.py b/src/app/api/dependencies.py index 5fead48..5438dfe 100644 --- a/src/app/api/dependencies.py +++ b/src/app/api/dependencies.py @@ -1,3 +1,4 @@ +import logging from typing import Annotated, Any from fastapi import Depends, HTTPException, Request @@ -6,7 +7,6 @@ from ..core.config import settings from ..core.db.database import async_get_db from ..core.exceptions.http_exceptions import ForbiddenException, RateLimitException, UnauthorizedException -from ..core.logger import logging from ..core.security import TokenType, oauth2_scheme, verify_token from ..core.utils.rate_limit import rate_limiter from ..crud.crud_rate_limit import crud_rate_limits diff --git a/src/app/core/config.py b/src/app/core/config.py index c031243..22d2932 100644 --- a/src/app/core/config.py +++ b/src/app/core/config.py @@ -1,11 +1,12 @@ -import os -import warnings +import logging from enum import Enum from typing import Self from pydantic import SecretStr, computed_field, field_validator, model_validator from pydantic_settings import BaseSettings, SettingsConfigDict +logger = logging.getLogger(__name__) + class AppSettings(BaseSettings): APP_NAME: str = "FastAPI app" @@ -27,6 +28,19 @@ def validate_hosts(cls, host: str) -> str: return host +class LogLevelOption(str, Enum): + DEBUG = "DEBUG" + INFO = "INFO" + WARNING = "WARNING" + ERROR = "ERROR" + + +class LoggingSettings(BaseSettings): + LOG_LEVEL: LogLevelOption = LogLevelOption.INFO + LOG_FORMAT_AS_JSON: bool = False + LOG_TO_FILE: bool = False + + class CryptSettings(BaseSettings): SECRET_KEY: SecretStr = SecretStr("secret-key") ALGORITHM: str = "HS256" @@ -126,14 +140,19 @@ class CRUDAdminSettings(BaseSettings): class EnvironmentOption(str, Enum): - LOCAL = "local" - STAGING = 
"staging" - PRODUCTION = "production" + LOCAL = "LOCAL" + STAGING = "STAGING" + PRODUCTION = "PRODUCTION" class EnvironmentSettings(BaseSettings): ENVIRONMENT: EnvironmentOption = EnvironmentOption.LOCAL + @field_validator("ENVIRONMENT", mode="before") + @classmethod + def normalize_environment(cls, v: str) -> str: + return v.upper() + class CORSSettings(BaseSettings): CORS_ORIGINS: list[str] = ["*"] @@ -155,37 +174,50 @@ class Settings( CRUDAdminSettings, EnvironmentSettings, CORSSettings, + LoggingSettings, ): model_config = SettingsConfigDict( - env_file=os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "..", ".env"), - env_file_encoding="utf-8", case_sensitive=True, extra="ignore", ) @model_validator(mode="after") def validate_environment_settings(self) -> Self: - "The validation should not modify any of the settings. It should provide" - "feedback to the user if any misconfiguration is detected." + """The validation should not modify any of the settings. + + It should provide feedback to the user if any misconfiguration is detected, or raise and error if the + misconfiguration is critical. + """ + environment = self.ENVIRONMENT.value if self.ENVIRONMENT == EnvironmentOption.LOCAL: pass elif self.ENVIRONMENT == EnvironmentOption.STAGING: if "*" in self.CORS_ORIGINS: - warnings.warn( - "For security, in a staging environment CORS_ORIGINS should not include '*'. " - "It's recommended to specify explicit origins (e.g., ['https://staging.example.com'])." + logger.warning( + f"For security, in a {environment} environment CORS_ORIGINS should not include '*'. " + "It is recommended to specify explicit origins (e.g., ['https://staging.example.com'])." ) elif self.ENVIRONMENT == EnvironmentOption.PRODUCTION: if "*" in self.CORS_ORIGINS: raise ValueError( - "For security, in a production environment CORS_ORIGINS cannot include '*'. " + f"For security, in a {environment} environment CORS_ORIGINS cannot include '*'. 
" "You must specify explicit allowed origins (e.g., ['https://example.com', 'https://www.example.com'])." ) if self.APP_FRONTEND_HOST and not self.APP_FRONTEND_HOST.startswith("https://"): raise ValueError( - "In production, APP_FRONTEND_HOST must start with the https:// protocol. " + f"In {environment} environment, APP_FRONTEND_HOST must start with the https:// protocol. " f"Received the host '{self.APP_FRONTEND_HOST}'." ) + if self.LOG_LEVEL == LogLevelOption.DEBUG: + logger.warning( + f"In a {environment} environment, it is recommended to set LOG_LEVEL to INFO, WARNING, or ERROR. " + "It is currently being set to DEBUG." + ) + if self.LOG_FORMAT_AS_JSON is False: + logger.warning( + f"In a {environment} environment, it is recommended to set LOG_FORMAT_AS_JSON to true " + "if you are using log aggregation tools." + ) return self diff --git a/src/app/core/logger.py b/src/app/core/logger.py index 91b35a1..911b117 100644 --- a/src/app/core/logger.py +++ b/src/app/core/logger.py @@ -1,20 +1,133 @@ import logging +import logging.config import os -from logging.handlers import RotatingFileHandler +from datetime import UTC, datetime +from pathlib import Path +from typing import Any -LOG_DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), "logs") -if not os.path.exists(LOG_DIR): - os.makedirs(LOG_DIR) +from pythonjsonlogger.json import JsonFormatter -LOG_FILE_PATH = os.path.join(LOG_DIR, "app.log") -LOGGING_LEVEL = logging.INFO -LOGGING_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s" +class ColoredFormatter(logging.Formatter): + """Colored formatter for development console output.""" -logging.basicConfig(level=LOGGING_LEVEL, format=LOGGING_FORMAT) + COLORS = { + "DEBUG": "\033[36m", # Cyan + "INFO": "\033[32m", # Green + "WARNING": "\033[33m", # Yellow + "ERROR": "\033[31m", # Red + "CRITICAL": "\033[35m", # Magenta + } + RESET = "\033[0m" -file_handler = RotatingFileHandler(LOG_FILE_PATH, maxBytes=10485760, backupCount=5) 
-file_handler.setLevel(LOGGING_LEVEL) -file_handler.setFormatter(logging.Formatter(LOGGING_FORMAT)) + def format(self, record: logging.LogRecord) -> str: + # Create a copy of the record to avoid modifying the original + record_copy = logging.makeLogRecord(record.__dict__) + log_color = self.COLORS.get(record_copy.levelname, "") + record_copy.levelname = f"{log_color}{record_copy.levelname}{self.RESET}" + return super().format(record_copy) -logging.getLogger("").addHandler(file_handler) + +def log_directory() -> Path: + """Ensure log directory exists and return the path.""" + log_dir = Path(__file__).parent.parent.parent / "logs" + log_dir.mkdir(parents=True, exist_ok=True) + return log_dir + + +def get_logging_config() -> dict[str, Any]: + """Get logging configuration.""" + + # We read logging settings from environment variables instead of settings in config.py + # to ensure logging is configured as early as possible, before settings are instantiated. + # In this way we can also capture any logs during settings validation. 
+ log_level = os.environ.get("LOG_LEVEL", "INFO").upper() + log_to_file = os.environ.get("LOG_TO_FILE", "False").lower() == "true" + log_format_as_json = os.environ.get("LOG_FORMAT_AS_JSON", "False").lower() == "true" + + config = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "colored_text": { + "()": ColoredFormatter, + "format": "%(asctime)s- %(levelname)s - %(name)s - %(message)s", + "datefmt": "%Y-%m-%d %H:%M:%S", + }, + "plain_text": { + "format": "%(asctime)s- %(levelname)s - %(name)s - %(message)s", + "datefmt": "%Y-%m-%d %H:%M:%S", + }, + "json": { + "()": JsonFormatter, + "format": "%(asctime)s %(levelname)s %(name)s %(message)s %(pathname)s %(lineno)d", + }, + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "level": log_level, + "stream": "ext://sys.stdout", + "formatter": "colored_text", + }, + }, + "root": {"level": log_level, "handlers": ["console"]}, + "loggers": { + "uvicorn.access": { + "level": "INFO", + "handlers": ["console"], + "propagate": False, # Don't propagate to root logger to avoid double logging + }, + "uvicorn.error": {"level": "INFO"}, + "sqlalchemy.engine": {"level": "WARNING"}, # Hide SQL queries unless warning/error + "sqlalchemy.pool": {"level": "WARNING"}, + "httpx": {"level": "WARNING"}, # External HTTP client logs + "httpcore": {"level": "WARNING"}, + }, + } + + if log_to_file: + # Create file handler only when needed + log_dir = log_directory() + # Keeping filename timestamp granularity to minutes to avoid too + # many log files during development and reloading. Keeping it human + # readable for easier debugging using 3 letter month, in AM/PM format + # and without the year. It has to be in UTC as it runs in containers. 
+ timestamp = datetime.now(UTC).strftime("%d-%b_%I-%M%p_UTC") + log_file = log_dir / f"web_{timestamp}.log" + + config["handlers"]["file"] = { # type: ignore[index] + "class": "logging.handlers.RotatingFileHandler", + "level": log_level, + "filename": str(log_file), + "maxBytes": 10485760, # 10MB + "backupCount": 5, + "formatter": "file", + } + config["root"]["handlers"].append("file") # type: ignore[index] + config["loggers"]["uvicorn.access"]["handlers"].append("file") # type: ignore[index] + if log_format_as_json: + config["handlers"]["file"]["formatter"] = "json" # type: ignore[index] + else: + config["handlers"]["file"]["formatter"] = "plain_text" # type: ignore[index] + + if log_format_as_json: + config["handlers"]["console"]["formatter"] = "json" # type: ignore[index] + + return config + + +def setup_logging() -> None: + """Setup logging configuration based on environment.""" + config = get_logging_config() + logging.config.dictConfig(config) + + # Log startup information + logger = logging.getLogger(__name__) + logger.info(f"Log level set to {config['root']['level']}") + if config["handlers"]["console"]["formatter"] == "json": + logger.info("Logs will be written in JSON format") + if "console" in config["root"]["handlers"]: + logger.info("Logs will be written to the console") + if "file" in config["root"]["handlers"]: + logger.info(f"Logs will be written to the file {config['handlers']['file']['filename']}") diff --git a/src/app/core/setup.py b/src/app/core/setup.py index b2cdcbf..9d14a98 100644 --- a/src/app/core/setup.py +++ b/src/app/core/setup.py @@ -188,7 +188,6 @@ def create_application( for caching, queue, and rate limiting, client-side caching, and customizing the API documentation based on the environment settings. 
""" - # --- before creating application --- if isinstance(settings, AppSettings): to_update = { "title": settings.APP_NAME, diff --git a/src/app/core/utils/rate_limit.py b/src/app/core/utils/rate_limit.py index 9cbb4eb..8942392 100644 --- a/src/app/core/utils/rate_limit.py +++ b/src/app/core/utils/rate_limit.py @@ -1,10 +1,10 @@ +import logging from datetime import UTC, datetime from typing import Optional from redis.asyncio import ConnectionPool, Redis from sqlalchemy.ext.asyncio import AsyncSession -from ...core.logger import logging from ...schemas.rate_limit import sanitize_path logger = logging.getLogger(__name__) @@ -12,8 +12,8 @@ class RateLimiter: _instance: Optional["RateLimiter"] = None - pool: Optional[ConnectionPool] = None - client: Optional[Redis] = None + pool: ConnectionPool | None = None + client: Redis | None = None def __new__(cls) -> "RateLimiter": if cls._instance is None: diff --git a/src/scripts/create_first_superuser.py b/src/scripts/create_first_superuser.py index baf58af..39605dc 100644 --- a/src/scripts/create_first_superuser.py +++ b/src/scripts/create_first_superuser.py @@ -11,7 +11,6 @@ from ..app.core.security import get_password_hash from ..app.models.user import User -logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) diff --git a/src/scripts/create_first_tier.py b/src/scripts/create_first_tier.py index baceb9f..3b9572a 100644 --- a/src/scripts/create_first_tier.py +++ b/src/scripts/create_first_tier.py @@ -7,7 +7,6 @@ from ..app.core.db.database import AsyncSession, local_session from ..app.models.tier import Tier -logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) diff --git a/uv.lock b/uv.lock index 5dda7a2..58eb2b0 100644 --- a/uv.lock +++ b/uv.lock @@ -398,6 +398,7 @@ dependencies = [ { name = "pydantic-settings" }, { name = "python-dotenv" }, { name = "python-jose" }, + { name = "python-json-logger" }, { name = "python-multipart" }, { name = "redis" }, { name = "ruff" }, @@ 
-449,6 +450,7 @@ requires-dist = [ { name = "pytest-mock", marker = "extra == 'dev'", specifier = ">=3.14.0" }, { name = "python-dotenv", specifier = ">=1.0.0" }, { name = "python-jose", specifier = ">=3.3.0" }, + { name = "python-json-logger", specifier = ">=4.0.0" }, { name = "python-multipart", specifier = ">=0.0.9" }, { name = "redis", specifier = ">=5.0.1" }, { name = "ruff", specifier = ">=0.11.13" }, @@ -1109,6 +1111,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d9/c3/0bd11992072e6a1c513b16500a5d07f91a24017c5909b02c72c62d7ad024/python_jose-3.5.0-py2.py3-none-any.whl", hash = "sha256:abd1202f23d34dfad2c3d28cb8617b90acf34132c7afd60abd0b0b7d3cb55771", size = 34624, upload-time = "2025-05-28T17:31:52.802Z" }, ] +[[package]] +name = "python-json-logger" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/29/bf/eca6a3d43db1dae7070f70e160ab20b807627ba953663ba07928cdd3dc58/python_json_logger-4.0.0.tar.gz", hash = "sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f", size = 17683, upload-time = "2025-10-06T04:15:18.984Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/e5/fecf13f06e5e5f67e8837d777d1bc43fac0ed2b77a676804df5c34744727/python_json_logger-4.0.0-py3-none-any.whl", hash = "sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2", size = 15548, upload-time = "2025-10-06T04:15:17.553Z" }, +] + [[package]] name = "python-multipart" version = "0.0.20"