Over the past four years of developing enterprise FastAPI applications, I've learned that a well-structured project template is crucial for long-term maintainability and scalability. In this guide, I'll share my battle-tested approach to creating a Python FastAPI project template that can scale from simple APIs to complex microservices.
Personal Experience Note: When I first started with FastAPI, I struggled with organizing code as projects grew larger. After architecting multiple applications processing millions of requests daily, I've developed a template structure that has proven effective across various use cases.
Project Structure
Here's the complete project structure I've refined through multiple production deployments:
```
fastapi_project/
├── app/
│   ├── __init__.py
│   ├── main.py
│   ├── core/
│   │   ├── __init__.py
│   │   ├── config.py
│   │   ├── security.py
│   │   └── exceptions.py
│   ├── api/
│   │   ├── __init__.py
│   │   ├── v1/
│   │   │   ├── __init__.py
│   │   │   ├── endpoints/
│   │   │   ├── schemas/
│   │   │   └── dependencies.py
│   │   └── deps.py
│   ├── models/
│   │   ├── __init__.py
│   │   └── base.py
│   ├── schemas/
│   │   ├── __init__.py
│   │   └── base.py
│   ├── crud/
│   │   ├── __init__.py
│   │   └── base.py
│   ├── db/
│   │   ├── __init__.py
│   │   ├── base.py
│   │   └── session.py
│   └── services/
│       ├── __init__.py
│       └── base.py
├── tests/
│   ├── __init__.py
│   ├── conftest.py
│   └── api/
├── alembic/
│   ├── versions/
│   ├── env.py
│   └── alembic.ini
├── scripts/
├── docs/
├── .env
├── .env.example
├── requirements/
│   ├── base.txt
│   ├── dev.txt
│   └── prod.txt
├── Dockerfile
├── docker-compose.yml
└── README.md
```
Initial Setup
Let's create a production-ready FastAPI project from scratch:
```bash
# Create project directory and virtual environment
mkdir fastapi_project && cd fastapi_project
python -m venv venv
source venv/bin/activate  # On Windows: venv\Scripts\activate

# Install base requirements (asyncpg is the async PostgreSQL driver used below)
pip install "fastapi[all]" uvicorn sqlalchemy asyncpg alembic pydantic-settings python-jose passlib python-multipart
```
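The project tree above splits dependencies into requirements/base.txt, dev.txt, and prod.txt, but the article doesn't show their contents. Here's one plausible way to organize them, matching the packages installed above; the exact pins and tooling choices are yours to make:

```text
# requirements/base.txt - runtime dependencies shared by every environment
fastapi[all]
uvicorn
sqlalchemy
asyncpg
alembic
pydantic-settings
python-jose
passlib
python-multipart
# add fastapi-cache2 and redis here if you enable the caching tip near the end

# requirements/dev.txt - test and lint tooling layered on top of base
-r base.txt
pytest
pytest-asyncio
httpx

# requirements/prod.txt - what the Dockerfile installs
-r base.txt
```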
Core Configuration
First, let's set up a robust configuration system:
```python
# app/core/config.py
from functools import lru_cache
from typing import List, Optional

from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
    API_V1_STR: str = "/api/v1"
    PROJECT_NAME: str = "FastAPI Project"

    POSTGRES_SERVER: str
    POSTGRES_USER: str
    POSTGRES_PASSWORD: str
    POSTGRES_DB: str
    SQLALCHEMY_DATABASE_URI: Optional[str] = None

    JWT_SECRET: str
    ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 30

    BACKEND_CORS_ORIGINS: List[str] = []

    model_config = SettingsConfigDict(env_file=".env", case_sensitive=True)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Use the asyncpg driver so this URI works with create_async_engine
        self.SQLALCHEMY_DATABASE_URI = (
            f"postgresql+asyncpg://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}"
            f"@{self.POSTGRES_SERVER}/{self.POSTGRES_DB}"
        )


@lru_cache
def get_settings() -> Settings:
    return Settings()
```
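The Settings class reads its required values from .env. A matching .env.example, committed with placeholder values rather than real secrets, might look like this (the values shown are illustrative):

```text
# .env.example - copy to .env and fill in real values
POSTGRES_SERVER=localhost   # use "db" when running under docker-compose
POSTGRES_USER=postgres
POSTGRES_PASSWORD=changeme
POSTGRES_DB=app
JWT_SECRET=replace-with-a-long-random-string

# Optional overrides
ACCESS_TOKEN_EXPIRE_MINUTES=30
BACKEND_CORS_ORIGINS=["http://localhost:3000"]
```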
Database Setup
Set up SQLAlchemy with async support:
```python
# app/db/base.py
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.orm import declarative_base, sessionmaker

from app.core.config import get_settings

settings = get_settings()

engine = create_async_engine(
    settings.SQLALCHEMY_DATABASE_URI,
    pool_pre_ping=True,
    echo=True,
)

AsyncSessionLocal = sessionmaker(
    engine,
    class_=AsyncSession,
    expire_on_commit=False,
)

Base = declarative_base()


async def get_db():
    async with AsyncSessionLocal() as session:
        try:
            yield session
            await session.commit()
        except Exception:
            await session.rollback()
            raise
        finally:
            await session.close()
```
Model and Schema Structure
Create base models and schemas for consistency:
```python
# app/models/base.py
from datetime import datetime

from sqlalchemy import Column, Integer, DateTime

from app.db.base import Base


class BaseModel(Base):
    __abstract__ = True

    id = Column(Integer, primary_key=True, index=True)
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
```

```python
# app/schemas/base.py
from datetime import datetime

from pydantic import BaseModel, ConfigDict


class BaseSchema(BaseModel):
    model_config = ConfigDict(from_attributes=True)


class BaseAPISchema(BaseSchema):
    id: int
    created_at: datetime
    updated_at: datetime
```
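The dependency code in the next section imports a User model from app.models.user, which the article never lists. As a concrete example of how the base classes above get used, here's a hypothetical User model and matching schemas; the field set is purely illustrative:

```python
# app/models/user.py -- hypothetical example, not part of the original listing
from sqlalchemy import Boolean, Column, String

from app.models.base import BaseModel


class User(BaseModel):
    __tablename__ = "users"

    email = Column(String, unique=True, index=True, nullable=False)
    hashed_password = Column(String, nullable=False)
    is_active = Column(Boolean, default=True)
```

```python
# app/schemas/user.py -- hypothetical example
from typing import Optional

from app.schemas.base import BaseSchema, BaseAPISchema


class UserCreate(BaseSchema):
    email: str
    password: str


class UserUpdate(BaseSchema):
    email: Optional[str] = None
    password: Optional[str] = None


class UserRead(BaseAPISchema):
    email: str
    is_active: bool
```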
API Structure and Dependencies
Set up a clean API structure with dependencies:
```python
# app/api/deps.py
from fastapi import Depends, HTTPException, status
from fastapi.security import OAuth2PasswordBearer
from jose import jwt, JWTError
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.config import get_settings
from app.db.base import get_db
from app.models.user import User

settings = get_settings()
oauth2_scheme = OAuth2PasswordBearer(tokenUrl=f"{settings.API_V1_STR}/auth/login")


async def get_current_user(
    db: AsyncSession = Depends(get_db),
    token: str = Depends(oauth2_scheme),
) -> User:
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    try:
        payload = jwt.decode(
            token, settings.JWT_SECRET, algorithms=[settings.ALGORITHM]
        )
        user_id = payload.get("sub")
        if user_id is None:
            raise credentials_exception
    except JWTError:
        raise credentials_exception

    # The "sub" claim is a string per the JWT spec, so cast before the PK lookup
    user = await db.get(User, int(user_id))
    if user is None:
        raise credentials_exception
    return user
```
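get_current_user only decodes tokens; the login route at {API_V1_STR}/auth/login that issues them isn't shown in the article. Here's a minimal sketch of that side, assuming the hypothetical User model above, passlib's bcrypt backend for password hashing, and string user IDs in the sub claim:

```python
# app/api/v1/endpoints/auth.py -- illustrative sketch of the token-issuing side
from datetime import datetime, timedelta

from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.security import OAuth2PasswordRequestForm
from jose import jwt
from passlib.context import CryptContext
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.config import get_settings
from app.db.base import get_db
from app.models.user import User

settings = get_settings()
router = APIRouter()
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")


@router.post("/login")
async def login(
    form_data: OAuth2PasswordRequestForm = Depends(),
    db: AsyncSession = Depends(get_db),
):
    # The OAuth2 form's "username" field carries the email in this sketch
    result = await db.execute(select(User).where(User.email == form_data.username))
    user = result.scalar_one_or_none()
    if user is None or not pwd_context.verify(form_data.password, user.hashed_password):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect email or password",
        )
    expires = datetime.utcnow() + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
    token = jwt.encode(
        {"sub": str(user.id), "exp": expires},
        settings.JWT_SECRET,
        algorithm=settings.ALGORITHM,
    )
    return {"access_token": token, "token_type": "bearer"}
```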
CRUD Operations
Implement reusable CRUD operations:
```python
# app/crud/base.py
from typing import Generic, TypeVar, Type, Optional, List

from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel

from app.db.base import Base

ModelType = TypeVar("ModelType", bound=Base)
CreateSchemaType = TypeVar("CreateSchemaType", bound=BaseModel)
UpdateSchemaType = TypeVar("UpdateSchemaType", bound=BaseModel)


class CRUDBase(Generic[ModelType, CreateSchemaType, UpdateSchemaType]):
    def __init__(self, model: Type[ModelType]):
        self.model = model

    async def get(self, db: AsyncSession, id: int) -> Optional[ModelType]:
        return await db.get(self.model, id)

    async def get_multi(
        self, db: AsyncSession, *, skip: int = 0, limit: int = 100
    ) -> List[ModelType]:
        query = select(self.model).offset(skip).limit(limit)
        result = await db.execute(query)
        return result.scalars().all()

    async def create(
        self, db: AsyncSession, *, obj_in: CreateSchemaType
    ) -> ModelType:
        obj_in_data = jsonable_encoder(obj_in)
        db_obj = self.model(**obj_in_data)
        db.add(db_obj)
        await db.commit()
        await db.refresh(db_obj)
        return db_obj
```
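To show how CRUDBase is meant to be consumed, here's an illustrative sketch that wires it to the hypothetical User model and schemas from earlier; module and variable names are assumptions, not part of the original template:

```python
# app/crud/user.py -- illustrative
from app.crud.base import CRUDBase
from app.models.user import User
from app.schemas.user import UserCreate, UserUpdate

user_crud = CRUDBase[User, UserCreate, UserUpdate](User)
```

```python
# app/api/v1/endpoints/users.py -- illustrative
from typing import List

from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession

from app.api.deps import get_current_user
from app.crud.user import user_crud
from app.db.base import get_db
from app.models.user import User
from app.schemas.user import UserRead

router = APIRouter()


@router.get("/", response_model=List[UserRead])
async def list_users(
    skip: int = 0,
    limit: int = 100,
    db: AsyncSession = Depends(get_db),
    _: User = Depends(get_current_user),  # require a valid token
):
    return await user_crud.get_multi(db, skip=skip, limit=limit)


@router.get("/{user_id}", response_model=UserRead)
async def read_user(
    user_id: int,
    db: AsyncSession = Depends(get_db),
    _: User = Depends(get_current_user),
):
    user = await user_crud.get(db, user_id)
    if user is None:
        raise HTTPException(status_code=404, detail="User not found")
    return user
```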
Testing Setup
Create a robust testing framework:
```python
# tests/conftest.py
import asyncio

import pytest
from httpx import ASGITransport, AsyncClient
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.orm import sessionmaker

from app.main import app
from app.db.base import Base
from app.api.deps import get_db

TEST_SQLALCHEMY_DATABASE_URL = "postgresql+asyncpg://test:test@localhost:5432/test_db"


@pytest.fixture(scope="session")
def event_loop():
    loop = asyncio.get_event_loop_policy().new_event_loop()
    yield loop
    loop.close()


@pytest.fixture
async def test_db():
    engine = create_async_engine(
        TEST_SQLALCHEMY_DATABASE_URL, pool_pre_ping=True, echo=True
    )
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    TestingSessionLocal = sessionmaker(
        engine, class_=AsyncSession, expire_on_commit=False
    )

    async def override_get_db():
        async with TestingSessionLocal() as session:
            yield session

    app.dependency_overrides[get_db] = override_get_db
    yield

    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)


@pytest.fixture
async def client(test_db):
    # Recent httpx versions no longer accept app= directly; route requests
    # through ASGITransport instead
    transport = ASGITransport(app=app)
    async with AsyncClient(transport=transport, base_url="http://test") as client:
        yield client
```
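With those fixtures in place, tests just request the client fixture. A minimal example (assuming pytest-asyncio and a /health route like the one sketched in the main.py example further down) could look like this:

```python
# tests/api/test_health.py -- illustrative example test
import pytest


@pytest.mark.asyncio
async def test_health_check(client):
    response = await client.get("/health")
    assert response.status_code == 200
    assert response.json() == {"status": "ok"}
```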
Development Tools
Docker Configuration
```dockerfile
# Dockerfile
FROM python:3.11-slim

WORKDIR /app

COPY requirements requirements
RUN pip install --no-cache-dir -r requirements/prod.txt

COPY . .

CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
```
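The CMD above (and the tests' from app.main import app) assume an app/main.py entry point, which the article doesn't list. A minimal sketch that wires together the settings, CORS, and the versioned routers could look like this; the endpoint module names match the illustrative sketches above and are assumptions:

```python
# app/main.py -- illustrative sketch of the application entry point
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

from app.api.v1.endpoints import auth, users  # hypothetical endpoint modules
from app.core.config import get_settings

settings = get_settings()

app = FastAPI(
    title=settings.PROJECT_NAME,
    openapi_url=f"{settings.API_V1_STR}/openapi.json",
)

if settings.BACKEND_CORS_ORIGINS:
    app.add_middleware(
        CORSMiddleware,
        allow_origins=settings.BACKEND_CORS_ORIGINS,
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

app.include_router(auth.router, prefix=f"{settings.API_V1_STR}/auth", tags=["auth"])
app.include_router(users.router, prefix=f"{settings.API_V1_STR}/users", tags=["users"])


@app.get("/health")
async def health_check():
    return {"status": "ok"}
```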
Docker Compose Setup
```yaml
# docker-compose.yml
version: '3.8'

services:
  web:
    build: .
    ports:
      - "8000:8000"
    env_file:
      - .env
    depends_on:
      - db

  db:
    image: postgres:13
    volumes:
      - postgres_data:/var/lib/postgresql/data
    env_file:
      - .env

volumes:
  postgres_data:
```
Performance Tips
Key optimizations I've learned from production:
```python
# 1. Tune connection pooling (the async engine already uses an
#    asyncio-compatible queue pool, so don't pass QueuePool here;
#    only the sizing needs adjusting)
engine = create_async_engine(
    settings.SQLALCHEMY_DATABASE_URI,
    pool_size=5,
    max_overflow=10,
    pool_timeout=30,
)

# 2. Implement caching (fastapi-cache2 with a Redis backend)
from redis import asyncio as aioredis

from fastapi_cache import FastAPICache
from fastapi_cache.backends.redis import RedisBackend


@app.on_event("startup")
async def startup():
    redis = aioredis.from_url("redis://localhost")
    FastAPICache.init(RedisBackend(redis), prefix="fastapi-cache:")
```
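Once FastAPICache is initialized at startup, individual endpoints opt into caching with the decorator that ships with fastapi-cache2. The route and expiry below are illustrative:

```python
from fastapi_cache.decorator import cache


@router.get("/expensive-report")
@cache(expire=60)  # cache the serialized response in Redis for 60 seconds
async def expensive_report():
    ...
```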
Conclusion
This template provides a solid foundation for building FastAPI applications that can scale. The key is maintaining a clear structure while following FastAPI's best practices for async operations and dependency injection.
Remember: A good template is flexible enough to accommodate your project's growth but structured enough to maintain consistency. Adjust this template based on your specific needs while keeping the core organizational principles intact.
For more information and updates, visit the FastAPI documentation or check out the official GitHub repository.