mirror of https://github.com/d3vyce/fastapi-toolsets.git
synced 2026-04-16 14:46:24 +02:00
Compare commits: v2.4.3...168345756f (20 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 168345756f | |
| | 4342d224c7 | |
| | 233362dd35 | |
| | f13e61c5a7 | |
| | 32059dcb02 | |
| | f027981e80 | |
| | 5c1487c24a | |
| | ebaa61525f | |
| | 4829cfba73 | |
| | 9ca2da4213 | |
| | 0b3f097012 | |
| | 1890d696bf | |
| | 104285c6e5 | |
| | f5afbbe37f | |
| | f4698bea8a | |
| | 5215b921ae | |
| | 9dad59e25d | |
| | 29326ab532 | |
| | 04afef7e33 | |
| | 666c621fda | |
.github/workflows/docs.yml (vendored, 52 changed lines)
```diff
@@ -5,20 +5,15 @@ on:
     types: [published]
 
 permissions:
-  contents: read
-  pages: write
-  id-token: write
+  contents: write
 
 jobs:
   deploy:
-    environment:
-      name: github-pages
-      url: ${{ steps.deployment.outputs.page_url }}
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/configure-pages@v5
-
       - uses: actions/checkout@v6
+        with:
+          fetch-depth: 0
 
       - name: Install uv
         uses: astral-sh/setup-uv@v7
```
```diff
@@ -28,11 +23,40 @@ jobs:
 
       - run: uv sync --group dev
 
-      - run: uv run zensical build --clean
+      - name: Configure git
+        run: |
+          git config user.name "github-actions[bot]"
+          git config user.email "github-actions[bot]@users.noreply.github.com"
 
-      - uses: actions/upload-pages-artifact@v4
-        with:
-          path: site
+      - name: Deploy documentation
+        run: |
+          VERSION=${GITHUB_REF_NAME#v}
+          MAJOR=$(echo "$VERSION" | cut -d. -f1)
+          DEPLOY_VERSION="v$(echo "$VERSION" | cut -d. -f1-2)"
 
-      - uses: actions/deploy-pages@v5
-        id: deployment
+          # On new major: consolidate previous major's feature versions into vX
+          PREV_MAJOR=$((MAJOR - 1))
+          OLD_FEATURE_VERSIONS=$(uv run mike list 2>/dev/null | grep -oE "^v${PREV_MAJOR}\.[0-9]+" || true)
+
+          if [ -n "$OLD_FEATURE_VERSIONS" ]; then
+            LATEST_PREV_TAG=$(git tag -l "v${PREV_MAJOR}.*" | sort -V | tail -1)
+
+            if [ -n "$LATEST_PREV_TAG" ]; then
+              git checkout "$LATEST_PREV_TAG" -- docs/ src/ zensical.toml
+              if ! grep -q '\[project\.extra\.version\]' zensical.toml; then
+                printf '\n[project.extra.version]\nprovider = "mike"\ndefault = "stable"\nalias = true\n' >> zensical.toml
+              fi
+              uv run mike deploy "v${PREV_MAJOR}"
+              git checkout HEAD -- docs/ src/ zensical.toml
+            fi
+
+            # Delete old feature versions
+            echo "$OLD_FEATURE_VERSIONS" | while read -r OLD_V; do
+              echo "Deleting $OLD_V"
+              uv run mike delete "$OLD_V"
+            done
+          fi
+
+          uv run mike deploy --update-aliases "$DEPLOY_VERSION" stable
+          uv run mike set-default stable
+          git push origin gh-pages
```
|||||||
@@ -48,7 +48,8 @@ uv add "fastapi-toolsets[all]"
|
|||||||
- **Database**: Session management, transaction helpers, table locking, and polling-based row change detection
|
- **Database**: Session management, transaction helpers, table locking, and polling-based row change detection
|
||||||
- **Dependencies**: FastAPI dependency factories (`PathDependency`, `BodyDependency`) for automatic DB lookups from path or body parameters
|
- **Dependencies**: FastAPI dependency factories (`PathDependency`, `BodyDependency`) for automatic DB lookups from path or body parameters
|
||||||
- **Fixtures**: Fixture system with dependency management, context support, and pytest integration
|
- **Fixtures**: Fixture system with dependency management, context support, and pytest integration
|
||||||
- **Model Mixins**: SQLAlchemy mixins for common column patterns (`UUIDMixin`, `UUIDv7Mixin`, `CreatedAtMixin`, `UpdatedAtMixin`, `TimestampMixin`) and lifecycle callbacks (`WatchedFieldsMixin`, `@watch`) that fire after commit for insert, update, and delete events
|
- **Model Mixins**: SQLAlchemy mixins for common column patterns (`UUIDMixin`, `UUIDv7Mixin`, `CreatedAtMixin`, `UpdatedAtMixin`, `TimestampMixin`)
|
||||||
|
- **Lifecycle Events**: Post-commit event system (`EventSession`, `listens_for`) that dispatches async/sync callbacks for insert, update, and delete operations
|
||||||
- **Standardized API Responses**: Consistent response format with `Response`, `ErrorResponse`, `PaginatedResponse`, `CursorPaginatedResponse` and `OffsetPaginatedResponse`.
|
- **Standardized API Responses**: Consistent response format with `Response`, `ErrorResponse`, `PaginatedResponse`, `CursorPaginatedResponse` and `OffsetPaginatedResponse`.
|
||||||
- **Exception Handling**: Structured error responses with automatic OpenAPI documentation
|
- **Exception Handling**: Structured error responses with automatic OpenAPI documentation
|
||||||
- **Logging**: Logging configuration with uvicorn integration via `configure_logging` and `get_logger`
|
- **Logging**: Logging configuration with uvicorn integration via `configure_logging` and `get_logger`
|
||||||
|
|||||||
docs/examples/authentication.md (new file, 1 line)

```diff
@@ -0,0 +1 @@
+# Authentication
```
````diff
@@ -43,16 +43,16 @@ Declare `searchable_fields`, `facet_fields`, and `order_fields` once on [`CrudFa
 
 ## Routes
 
-```python title="routes.py:1:17"
---8<-- "docs_src/examples/pagination_search/routes.py:1:17"
+```python title="routes.py:1:16"
+--8<-- "docs_src/examples/pagination_search/routes.py:1:16"
 ```
 
 ### Offset pagination
 
 Best for admin panels or any UI that needs a total item count and numbered pages.
 
-```python title="routes.py:20:40"
---8<-- "docs_src/examples/pagination_search/routes.py:20:40"
+```python title="routes.py:19:37"
+--8<-- "docs_src/examples/pagination_search/routes.py:19:37"
 ```
 
 **Example request**
@@ -92,8 +92,8 @@ To skip the `COUNT(*)` query for better performance on large tables, pass `inclu
 
 Best for feeds, infinite scroll, or any high-throughput API where offset performance degrades.
 
-```python title="routes.py:43:63"
---8<-- "docs_src/examples/pagination_search/routes.py:43:63"
+```python title="routes.py:40:58"
+--8<-- "docs_src/examples/pagination_search/routes.py:40:58"
 ```
 
 **Example request**
@@ -132,8 +132,8 @@ Pass `next_cursor` as the `cursor` query parameter on the next request to advanc
 
 [`paginate()`](../module/crud.md#unified-paginate--both-strategies-on-one-endpoint) lets a single endpoint support both strategies via a `pagination_type` query parameter. The `pagination_type` field in the response acts as a discriminator for frontend tooling.
 
-```python title="routes.py:66:90"
---8<-- "docs_src/examples/pagination_search/routes.py:66:90"
+```python title="routes.py:61:79"
+--8<-- "docs_src/examples/pagination_search/routes.py:61:79"
 ```
 
 **Offset request** (default)
````
```diff
@@ -48,7 +48,8 @@ uv add "fastapi-toolsets[all]"
 - **Database**: Session management, transaction helpers, table locking, and polling-based row change detection
 - **Dependencies**: FastAPI dependency factories (`PathDependency`, `BodyDependency`) for automatic DB lookups from path or body parameters
 - **Fixtures**: Fixture system with dependency management, context support, and pytest integration
-- **Model Mixins**: SQLAlchemy mixins for common column patterns (`UUIDMixin`, `UUIDv7Mixin`, `CreatedAtMixin`, `UpdatedAtMixin`, `TimestampMixin`) and lifecycle callbacks (`WatchedFieldsMixin`) that fire after commit for insert, update, and delete events.
+- **Model Mixins**: SQLAlchemy mixins for common column patterns (`UUIDMixin`, `UUIDv7Mixin`, `CreatedAtMixin`, `UpdatedAtMixin`, `TimestampMixin`).
+- **Lifecycle Events**: Post-commit event system (`EventSession`, `listens_for`) that dispatches async/sync callbacks for insert, update, and delete operations.
 - **Standardized API Responses**: Consistent response format with `Response`, `ErrorResponse`, `PaginatedResponse`, `CursorPaginatedResponse` and `OffsetPaginatedResponse`.
 - **Exception Handling**: Structured error responses with automatic OpenAPI documentation
 - **Logging**: Logging configuration with uvicorn integration via `configure_logging` and `get_logger`
```
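The "async/sync callbacks" wording in the new feature bullet means handlers registered with `listens_for` may be plain functions as well as coroutines. A minimal sketch under that assumption (the `Order` model and handler signature follow the migration guide below; the logging body is illustrative):

```python
from fastapi_toolsets.models import ModelEvent, listens_for

# Hypothetical sync handler: per the bullet above, plain `def` callbacks
# are dispatched after commit just like `async def` ones.
@listens_for(Order, [ModelEvent.UPDATE])
def log_status_change(order: Order, event_type: ModelEvent, changes: dict):
    print(f"order {order.id} changed: {changes}")
```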
docs/migration/v3.md (new file, 93 lines)
````markdown
# Migrating to v3.0

This page covers every breaking change introduced in **v3.0** and the steps required to update your code.

---

## Models

The lifecycle event system has been rewritten. Callbacks are now registered with a module-level [`listens_for`](../reference/models.md#fastapi_toolsets.models.listens_for) decorator and dispatched by [`EventSession`](../reference/models.md#fastapi_toolsets.models.EventSession), replacing the mixin-based approach from `v2`.

### `WatchedFieldsMixin` and `@watch` removed

Importing `WatchedFieldsMixin` or `watch` will raise `ImportError`.

Model method callbacks (`on_create`, `on_delete`, `on_update`) and the `@watch` decorator are replaced by:

1. **`__watched_fields__`** — a plain class attribute to restrict which field changes trigger `UPDATE` events (replaces `@watch`).
2. **`@listens_for`** — a module-level decorator to register callbacks for one or more [`ModelEvent`](../reference/models.md#fastapi_toolsets.models.ModelEvent) types (replaces `on_create` / `on_delete` / `on_update` methods).

=== "Before (`v2`)"

    ```python
    from fastapi_toolsets.models import WatchedFieldsMixin, watch

    @watch("status")
    class Order(Base, UUIDMixin, WatchedFieldsMixin):
        __tablename__ = "orders"

        status: Mapped[str]

        async def on_create(self):
            await notify_new_order(self.id)

        async def on_update(self, changes):
            if "status" in changes:
                await notify_status_change(self.id, changes["status"])

        async def on_delete(self):
            await notify_order_cancelled(self.id)
    ```

=== "Now (`v3`)"

    ```python
    from fastapi_toolsets.models import ModelEvent, UUIDMixin, listens_for

    class Order(Base, UUIDMixin):
        __tablename__ = "orders"
        __watched_fields__ = ("status",)

        status: Mapped[str]

    @listens_for(Order, [ModelEvent.CREATE])
    async def on_order_created(order: Order, event_type: ModelEvent, changes: None):
        await notify_new_order(order.id)

    @listens_for(Order, [ModelEvent.UPDATE])
    async def on_order_updated(order: Order, event_type: ModelEvent, changes: dict):
        if "status" in changes:
            await notify_status_change(order.id, changes["status"])

    @listens_for(Order, [ModelEvent.DELETE])
    async def on_order_deleted(order: Order, event_type: ModelEvent, changes: None):
        await notify_order_cancelled(order.id)
    ```

### `EventSession` now required

Without `EventSession`, lifecycle callbacks will silently stop firing.

Callbacks are now dispatched inside `EventSession.commit()` rather than via background tasks. Pass it as the session class when creating your session factory:

=== "Before (`v2`)"

    ```python
    from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

    engine = create_async_engine("postgresql+asyncpg://...")
    SessionLocal = async_sessionmaker(engine, expire_on_commit=False)
    ```

=== "Now (`v3`)"

    ```python
    from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
    from fastapi_toolsets.models import EventSession

    engine = create_async_engine("postgresql+asyncpg://...")
    SessionLocal = async_sessionmaker(engine, expire_on_commit=False, class_=EventSession)
    ```

!!! note
    If you use `create_db_session` from `fastapi_toolsets.pytest`, the session already uses `EventSession` — no changes needed in tests.
````
````diff
@@ -159,18 +159,15 @@ Three pagination methods are available. All return a typed response whose `pagi
 ### Offset pagination
 
 ```python
+from typing import Annotated
+from fastapi import Depends
 
 @router.get("")
 async def get_users(
     session: SessionDep,
-    items_per_page: int = 50,
-    page: int = 1,
+    params: Annotated[dict, Depends(UserCrud.offset_paginate_params())],
 ) -> OffsetPaginatedResponse[UserRead]:
-    return await UserCrud.offset_paginate(
-        session=session,
-        items_per_page=items_per_page,
-        page=page,
-        schema=UserRead,
-    )
+    return await UserCrud.offset_paginate(session=session, **params, schema=UserRead)
 ```
 
 The [`offset_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.offset_paginate) method returns an [`OffsetPaginatedResponse`](../reference/schemas.md#fastapi_toolsets.schemas.OffsetPaginatedResponse):
````
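For orientation, the `params` dict produced by `offset_paginate_params()` appears to carry the same keyword arguments the old signature took explicitly; a hedged illustration (the exact keys are an assumption inferred from the parameters the dependency replaced):

```python
# Hypothetical resolution of the dependency for GET /users?page=2&items_per_page=25:
params = {"page": 2, "items_per_page": 25}

# so the splatted call above would be equivalent to the pre-dependency form:
await UserCrud.offset_paginate(session=session, page=2, items_per_page=25, schema=UserRead)
```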
````diff
@@ -194,32 +191,13 @@ The [`offset_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.Async
 
 !!! info "Added in `v2.4.1`"
 
-By default `offset_paginate` runs two queries: one for the page items and one `COUNT(*)` for `total_count`. On large tables the `COUNT` can be expensive. Pass `include_total=False` to skip it:
+By default `offset_paginate` runs two queries: one for the page items and one `COUNT(*)` for `total_count`. On large tables the `COUNT` can be expensive. Pass `include_total=False` to `offset_paginate_params()` to skip it:
 
 ```python
-result = await UserCrud.offset_paginate(
-    session=session,
-    page=page,
-    items_per_page=items_per_page,
-    include_total=False,
-    schema=UserRead,
-)
-```
-
-#### Pagination params dependency
-
-!!! info "Added in `v2.4.1`"
-
-Use [`offset_params()`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.offset_params) to generate a FastAPI dependency that injects `page` and `items_per_page` from query parameters with configurable defaults and a `max_page_size` cap:
-
-```python
-from typing import Annotated
-from fastapi import Depends
-
 @router.get("")
-async def list_users(
+async def get_users(
     session: SessionDep,
-    params: Annotated[dict, Depends(UserCrud.offset_params(default_page_size=20, max_page_size=100))],
+    params: Annotated[dict, Depends(UserCrud.offset_paginate_params(include_total=False))],
 ) -> OffsetPaginatedResponse[UserRead]:
     return await UserCrud.offset_paginate(session=session, **params, schema=UserRead)
 ```
@@ -230,15 +208,9 @@ async def list_users(
 @router.get("")
 async def list_users(
     session: SessionDep,
-    cursor: str | None = None,
-    items_per_page: int = 20,
+    params: Annotated[dict, Depends(UserCrud.cursor_paginate_params())],
 ) -> CursorPaginatedResponse[UserRead]:
-    return await UserCrud.cursor_paginate(
-        session=session,
-        cursor=cursor,
-        items_per_page=items_per_page,
-        schema=UserRead,
-    )
+    return await UserCrud.cursor_paginate(session=session, **params, schema=UserRead)
 ```
 
 The [`cursor_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.cursor_paginate) method returns a [`CursorPaginatedResponse`](../reference/schemas.md#fastapi_toolsets.schemas.CursorPaginatedResponse):
````
````diff
@@ -291,24 +263,6 @@ PostCrud = CrudFactory(model=Post, cursor_column=Post.id)
 PostCrud = CrudFactory(model=Post, cursor_column=Post.created_at)
 ```
 
-#### Pagination params dependency
-
-!!! info "Added in `v2.4.1`"
-
-Use [`cursor_params()`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.cursor_params) to inject `cursor` and `items_per_page` from query parameters with a `max_page_size` cap:
-
-```python
-from typing import Annotated
-from fastapi import Depends
-
-@router.get("")
-async def list_users(
-    session: SessionDep,
-    params: Annotated[dict, Depends(UserCrud.cursor_params(default_page_size=20, max_page_size=100))],
-) -> CursorPaginatedResponse[UserRead]:
-    return await UserCrud.cursor_paginate(session=session, **params, schema=UserRead)
-```
-
 ### Unified endpoint (both strategies)
 
 !!! info "Added in `v2.3.0`"
@@ -316,25 +270,14 @@ async def list_users(
 [`paginate()`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.paginate) dispatches to `offset_paginate` or `cursor_paginate` based on a `pagination_type` query parameter, letting you expose **one endpoint** that supports both strategies. The `pagination_type` field in the response tells clients which strategy was used, enabling frontend discriminated-union typing.
 
 ```python
-from fastapi_toolsets.crud import PaginationType
 from fastapi_toolsets.schemas import PaginatedResponse
 
 @router.get("")
 async def list_users(
     session: SessionDep,
-    pagination_type: PaginationType = PaginationType.OFFSET,
-    page: int = Query(1, ge=1, description="Current page (offset only)"),
-    cursor: str | None = Query(None, description="Cursor token (cursor only)"),
-    items_per_page: int = Query(20, ge=1, le=100),
+    params: Annotated[dict, Depends(UserCrud.paginate_params())],
 ) -> PaginatedResponse[UserRead]:
-    return await UserCrud.paginate(
-        session,
-        pagination_type=pagination_type,
-        page=page,
-        cursor=cursor,
-        items_per_page=items_per_page,
-        schema=UserRead,
-    )
+    return await UserCrud.paginate(session, **params, schema=UserRead)
 ```
 
 ```
@@ -342,25 +285,6 @@ GET /users?pagination_type=offset&page=2&items_per_page=10
 GET /users?pagination_type=cursor&cursor=eyJ2YWx1ZSI6...&items_per_page=10
 ```
 
-#### Pagination params dependency
-
-!!! info "Added in `v2.4.1`"
-
-Use [`paginate_params()`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.paginate_params) to inject all parameters at once with configurable defaults and a `max_page_size` cap:
-
-```python
-from typing import Annotated
-from fastapi import Depends
-from fastapi_toolsets.schemas import PaginatedResponse
-
-@router.get("")
-async def list_users(
-    session: SessionDep,
-    params: Annotated[dict, Depends(UserCrud.paginate_params(default_page_size=20, max_page_size=100))],
-) -> PaginatedResponse[UserRead]:
-    return await UserCrud.paginate(session, **params, schema=UserRead)
-```
-
 ## Search
 
 Two search strategies are available, both compatible with [`offset_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.offset_paginate) and [`cursor_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.cursor_paginate).
````
````diff
@@ -406,34 +330,18 @@ This allows searching with both [`offset_paginate`](../reference/crud.md#fastapi
 @router.get("")
 async def get_users(
     session: SessionDep,
-    items_per_page: int = 50,
-    page: int = 1,
-    search: str | None = None,
+    params: Annotated[dict, Depends(UserCrud.offset_paginate_params())],
 ) -> OffsetPaginatedResponse[UserRead]:
-    return await UserCrud.offset_paginate(
-        session=session,
-        items_per_page=items_per_page,
-        page=page,
-        search=search,
-        schema=UserRead,
-    )
+    return await UserCrud.offset_paginate(session=session, **params, schema=UserRead)
 ```
 
 ```python
 @router.get("")
 async def get_users(
     session: SessionDep,
-    cursor: str | None = None,
-    items_per_page: int = 50,
-    search: str | None = None,
+    params: Annotated[dict, Depends(UserCrud.cursor_paginate_params())],
 ) -> CursorPaginatedResponse[UserRead]:
-    return await UserCrud.cursor_paginate(
-        session=session,
-        items_per_page=items_per_page,
-        cursor=cursor,
-        search=search,
-        schema=UserRead,
-    )
+    return await UserCrud.cursor_paginate(session=session, **params, schema=UserRead)
 ```
 
 ### Faceted search
@@ -474,7 +382,7 @@ The distinct values are returned in the `filter_attributes` field of [`Paginated
   "filter_attributes": {
     "status": ["active", "inactive"],
     "country": ["DE", "FR", "US"],
-    "name": ["admin", "editor", "viewer"]
+    "role__name": ["admin", "editor", "viewer"]
   }
 }
 ```
@@ -482,11 +390,11 @@ The distinct values are returned in the `filter_attributes` field of [`Paginated
 Use `filter_by` to pass the client's chosen filter values directly — no need to build SQLAlchemy conditions by hand. Any unknown key raises [`InvalidFacetFilterError`](../reference/exceptions.md#fastapi_toolsets.exceptions.exceptions.InvalidFacetFilterError).
 
 !!! info "The keys in `filter_by` are the same keys the client received in `filter_attributes`."
-    Keys are normally the terminal `column.key` (e.g. `"name"` for `Role.name`). When two facet fields share the same column key (e.g. `(Build.project, Project.name)` and `(Build.os, Os.name)`), the relationship name is prepended automatically: `"project__name"` and `"os__name"`.
+    Keys use `__` as a separator for the full relationship chain. A direct column `User.status` produces `"status"`. A relationship tuple `(User.role, Role.name)` produces `"role__name"`. A deeper chain `(User.role, Role.permission, Permission.name)` produces `"role__permission__name"`.
 
 `filter_by` and `filters` can be combined — both are applied with AND logic.
 
-Use [`filter_params()`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.filter_params) to generate a dict with the facet filter values from the query parameters:
+Facet filtering is built into the consolidated params dependencies. When `filter=True` (the default), facet fields are exposed as query parameters and collected into `filter_by` automatically:
 
 ```python
 from typing import Annotated
@@ -501,13 +409,11 @@ UserCrud = CrudFactory(
 @router.get("", response_model_exclude_none=True)
 async def list_users(
     session: SessionDep,
-    page: int = 1,
-    filter_by: Annotated[dict[str, list[str]], Depends(UserCrud.filter_params())],
+    params: Annotated[dict, Depends(UserCrud.offset_paginate_params())],
 ) -> OffsetPaginatedResponse[UserRead]:
     return await UserCrud.offset_paginate(
         session=session,
-        page=page,
-        filter_by=filter_by,
+        **params,
         schema=UserRead,
     )
 ```
````
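To make that key scheme concrete, a sketch of a factory whose facet fields span all three cases the new admonition describes (the models and the `facet_fields` values are illustrative; the `CrudFactory` import path is assumed from the `fastapi_toolsets.crud` imports shown elsewhere in this diff):

```python
from fastapi_toolsets.crud import CrudFactory

# Assumed models: User has a `role` relationship; Role has a `permission` one.
UserCrud = CrudFactory(
    model=User,
    facet_fields=[
        User.status,                                   # key: "status"
        (User.role, Role.name),                        # key: "role__name"
        (User.role, Role.permission, Permission.name), # key: "role__permission__name"
    ],
)
```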
````diff
@@ -517,7 +423,7 @@ Both single-value and multi-value query parameters work:
 ```
 GET /users?status=active → filter_by={"status": ["active"]}
 GET /users?status=active&country=FR → filter_by={"status": ["active"], "country": ["FR"]}
-GET /users?role=admin&role=editor → filter_by={"role": ["admin", "editor"]} (IN clause)
+GET /users?role__name=admin&role__name=editor → filter_by={"role__name": ["admin", "editor"]} (IN clause)
 ```
 
 ## Sorting
@@ -536,20 +442,21 @@ UserCrud = CrudFactory(
 )
 ```
 
-Call [`order_params()`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.order_params) to generate a FastAPI dependency that maps the query parameters to an [`OrderByClause`](../reference/crud.md#fastapi_toolsets.crud.factory.OrderByClause) expression:
+Ordering is built into the consolidated params dependencies. When `order=True` (the default), `order_by` and `order` query parameters are exposed and resolved into an `OrderByClause` automatically:
 
 ```python
 from typing import Annotated
 
 from fastapi import Depends
-from fastapi_toolsets.crud import OrderByClause
 
 @router.get("")
 async def list_users(
     session: SessionDep,
-    order_by: Annotated[OrderByClause | None, Depends(UserCrud.order_params())],
+    params: Annotated[dict, Depends(UserCrud.offset_paginate_params(
+        default_order_field=User.created_at,
+    ))],
 ) -> OffsetPaginatedResponse[UserRead]:
-    return await UserCrud.offset_paginate(session=session, order_by=order_by, schema=UserRead)
+    return await UserCrud.offset_paginate(session=session, **params, schema=UserRead)
 ```
 
 The dependency adds two query parameters to the endpoint:
@@ -566,10 +473,10 @@ GET /users?order_by=name&order=desc → ORDER BY users.name DESC
 
 An unknown `order_by` value raises [`InvalidOrderFieldError`](../reference/exceptions.md#fastapi_toolsets.exceptions.exceptions.InvalidOrderFieldError) (HTTP 422).
 
-You can also pass `order_fields` directly to `order_params()` to override the class-level defaults without modifying them:
+You can also pass `order_fields` directly to override the class-level defaults:
 
 ```python
-UserOrderParams = UserCrud.order_params(order_fields=[User.name])
+params = UserCrud.offset_paginate_params(order_fields=[User.name])
 ```
 
 ## Relationship loading
````
````diff
@@ -656,12 +563,11 @@ async def get_user(session: SessionDep, uuid: UUID) -> Response[UserRead]:
 )
 
 @router.get("")
-async def list_users(session: SessionDep, page: int = 1) -> OffsetPaginatedResponse[UserRead]:
-    return await crud.UserCrud.offset_paginate(
-        session=session,
-        page=page,
-        schema=UserRead,
-    )
+async def list_users(
+    session: SessionDep,
+    params: Annotated[dict, Depends(crud.UserCrud.offset_paginate_params())],
+) -> OffsetPaginatedResponse[UserRead]:
+    return await crud.UserCrud.offset_paginate(session=session, **params, schema=UserRead)
 ```
 
 The schema must have `from_attributes=True` (or inherit from [`PydanticBase`](../reference/schemas.md#fastapi_toolsets.schemas.PydanticBase)) so it can be built from SQLAlchemy model instances.
````
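A minimal sketch of such a schema; the field names are illustrative, and the assumption (implied by the note above) is that `PydanticBase` already configures `from_attributes=True`:

```python
from uuid import UUID

from fastapi_toolsets.schemas import PydanticBase

class UserRead(PydanticBase):
    # Inherits a model_config with from_attributes=True, so FastAPI can
    # validate this schema straight from SQLAlchemy model instances.
    id: UUID
    username: str
```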
````diff
@@ -38,18 +38,20 @@ By context with [`load_fixtures_by_context`](../reference/fixtures.md#fastapi_to
 from fastapi_toolsets.fixtures import load_fixtures_by_context
 
 async with db_context() as session:
-    await load_fixtures_by_context(session=session, registry=fixtures, context=Context.TESTING)
+    await load_fixtures_by_context(session, fixtures, Context.TESTING)
 ```
 
-Directly with [`load_fixtures`](../reference/fixtures.md#fastapi_toolsets.fixtures.utils.load_fixtures):
+Directly by name with [`load_fixtures`](../reference/fixtures.md#fastapi_toolsets.fixtures.utils.load_fixtures):
 
 ```python
 from fastapi_toolsets.fixtures import load_fixtures
 
 async with db_context() as session:
-    await load_fixtures(session=session, registry=fixtures)
+    await load_fixtures(session, fixtures, "roles", "test_users")
 ```
 
+Both functions return a `dict[str, list[...]]` mapping each fixture name to the list of loaded instances.
+
 ## Contexts
 
 [`Context`](../reference/fixtures.md#fastapi_toolsets.fixtures.enum.Context) is an enum with predefined values:
````
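A short sketch of consuming that return value, reusing the call and fixture names from the diff above (the `name` attribute is illustrative):

```python
from fastapi_toolsets.fixtures import load_fixtures

async with db_context() as session:
    loaded = await load_fixtures(session, fixtures, "roles", "test_users")
    # loaded maps each fixture name to its persisted instances,
    # e.g. {"roles": [Role(...), ...], "test_users": [User(...), ...]}
    for role in loaded["roles"]:
        print(role.name)
```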
````diff
@@ -58,10 +60,60 @@ async with db_context() as session:
 |---------|-------------|
 | `Context.BASE` | Core data required in all environments |
 | `Context.TESTING` | Data only loaded during tests |
+| `Context.DEVELOPMENT` | Data only loaded in development |
 | `Context.PRODUCTION` | Data only loaded in production |
 
 A fixture with no `contexts` defined takes `Context.BASE` by default.
 
+### Custom contexts
+
+Plain strings and any `Enum` subclass are accepted wherever a `Context` enum is expected.
+
+```python
+from enum import Enum
+
+class AppContext(str, Enum):
+    STAGING = "staging"
+    DEMO = "demo"
+
+@fixtures.register(contexts=[AppContext.STAGING])
+def staging_data():
+    return [Config(key="feature_x", enabled=True)]
+
+await load_fixtures_by_context(session, fixtures, AppContext.STAGING)
+```
+
+### Default context for a registry
+
+Pass `contexts` to `FixtureRegistry` to set a default for all fixtures registered in it:
+
+```python
+testing_registry = FixtureRegistry(contexts=[Context.TESTING])
+
+@testing_registry.register  # implicitly contexts=[Context.TESTING]
+def test_orders():
+    return [Order(id=1, total=99)]
+```
+
+### Same fixture name, multiple context variants
+
+The same fixture name may be registered under different (non-overlapping) context sets. When multiple contexts are loaded together, all matching variants are merged:
+
+```python
+@fixtures.register(contexts=[Context.BASE])
+def users():
+    return [User(id=1, username="admin")]
+
+@fixtures.register(contexts=[Context.TESTING])
+def users():
+    return [User(id=2, username="tester")]
+
+# loads both admin and tester
+await load_fixtures_by_context(session, fixtures, Context.BASE, Context.TESTING)
+```
+
+Registering two variants with overlapping context sets raises `ValueError`.
+
 ## Load strategies
 
 [`LoadStrategy`](../reference/fixtures.md#fastapi_toolsets.fixtures.enum.LoadStrategy) controls how the fixture loader handles rows that already exist:
````
````diff
@@ -69,20 +121,44 @@ A fixture with no `contexts` defined takes `Context.BASE` by default.
 | Strategy | Description |
 |----------|-------------|
 | `LoadStrategy.INSERT` | Insert only, fail on duplicates |
-| `LoadStrategy.UPSERT` | Insert or update on conflict |
-| `LoadStrategy.SKIP` | Skip rows that already exist |
+| `LoadStrategy.MERGE` | Insert or update on conflict (default) |
+| `LoadStrategy.SKIP_EXISTING` | Skip rows that already exist |
 
+```python
+await load_fixtures_by_context(
+    session, fixtures, Context.BASE, strategy=LoadStrategy.SKIP_EXISTING
+)
+```
+
 ## Merging registries
 
-Split fixtures definitions across modules and merge them:
+Split fixture definitions across modules and merge them:
 
 ```python
 from myapp.fixtures.dev import dev_fixtures
 from myapp.fixtures.prod import prod_fixtures
 
-fixtures = fixturesRegistry()
+fixtures = FixtureRegistry()
 fixtures.include_registry(registry=dev_fixtures)
 fixtures.include_registry(registry=prod_fixtures)
 ```
+
+Fixtures with the same name are allowed as long as their context sets do not overlap. Conflicting contexts raise `ValueError`.
+
+## Looking up fixture instances
+
+[`get_obj_by_attr`](../reference/fixtures.md#fastapi_toolsets.fixtures.utils.get_obj_by_attr) retrieves a specific instance from a fixture function by attribute value — useful when building cross-fixture `depends_on` relationships:
+
+```python
+from fastapi_toolsets.fixtures import get_obj_by_attr
+
+@fixtures.register(depends_on=["roles"])
+def users():
+    admin_role = get_obj_by_attr(roles, "name", "admin")
+    return [User(id=1, username="alice", role_id=admin_role.id)]
+```
+
+Raises `StopIteration` if no matching instance is found.
+
 ## Pytest integration
@@ -111,7 +187,6 @@ async def test_user_can_login(fixture_users: list[User], fixture_roles: list[Rol
     ...
 ```
 
-
 The load order is resolved automatically from the `depends_on` declarations in your registry. Each generated fixture receives `db_session` as a dependency and returns the list of loaded model instances.
 
 ## CLI integration
````
````diff
@@ -117,139 +117,118 @@ class Article(Base, UUIDMixin, TimestampMixin):
     title: Mapped[str]
 ```
 
-### [`WatchedFieldsMixin`](../reference/models.md#fastapi_toolsets.models.WatchedFieldsMixin)
+## Lifecycle events
 
-!!! info "Added in `v2.4`"
+The event system provides lifecycle callbacks that fire **after commit**. If the transaction rolls back, no callback fires.
 
-`WatchedFieldsMixin` provides lifecycle callbacks that fire **after commit** — meaning the row is durably persisted when your callback runs. If the transaction rolls back, no callback fires.
+### Setup
 
-Three callbacks are available, each corresponding to a [`ModelEvent`](../reference/models.md#fastapi_toolsets.models.ModelEvent) value:
+Event dispatch requires [`EventSession`](../reference/models.md#fastapi_toolsets.models.EventSession). Pass it as the session class when creating your session factory:
 
-| Callback | Event | Trigger |
-|---|---|---|
-| `on_create()` | `ModelEvent.CREATE` | After `INSERT` |
-| `on_delete()` | `ModelEvent.DELETE` | After `DELETE` |
-| `on_update(changes)` | `ModelEvent.UPDATE` | After `UPDATE` on a watched field |
-
-Server-side defaults (e.g. `id`, `created_at`) are fully populated in all callbacks. All callbacks support both `async def` and plain `def`. Use `@watch` to restrict which fields trigger `on_update`:
-
-| Decorator | `on_update` behaviour |
-|---|---|
-| `@watch("status", "role")` | Only fires when `status` or `role` changes |
-| *(no decorator)* | Fires when **any** mapped field changes |
-
-`@watch` is inherited through the class hierarchy. If a subclass does not declare its own `@watch`, it uses the filter from the nearest decorated parent. Applying `@watch` on the subclass overrides the parent's filter:
-
 ```python
-@watch("status")
-class Order(Base, UUIDMixin, WatchedFieldsMixin):
+from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
+from fastapi_toolsets.models import EventSession
 
+engine = create_async_engine("postgresql+asyncpg://...")
+SessionLocal = async_sessionmaker(engine, expire_on_commit=False, class_=EventSession)
+```
+
+!!! info "Callbacks fire on `session.commit()` only — not on savepoints."
+    Savepoints created by [`get_transaction`](db.md) or `begin_nested()` do **not**
+    trigger callbacks. All events accumulated across flushes are dispatched once
+    when the outermost `commit()` is called.
+
+### Events
+
+Three event types are available, each corresponding to a [`ModelEvent`](../reference/models.md#fastapi_toolsets.models.ModelEvent) value:
+
+| Event | Trigger |
+|---|---|
+| `ModelEvent.CREATE` | After `INSERT` commit |
+| `ModelEvent.DELETE` | After `DELETE` commit |
+| `ModelEvent.UPDATE` | After `UPDATE` commit on a watched field |
+
+!!! warning "Callbacks fire only for ORM-level changes. Rows updated via raw SQL (`UPDATE ... SET ...`) are not detected."
+
+### Watched fields
+
+Set `__watched_fields__` on the model to restrict which field changes trigger `UPDATE` events. It must be a `tuple[str, ...]` — any other type raises `TypeError`:
+
+| Class attribute | `UPDATE` behaviour |
+|---|---|
+| `__watched_fields__ = ("status", "role")` | Only fires when `status` or `role` changes |
+| *(not set)* | Fires when **any** mapped field changes |
+
+`__watched_fields__` is inherited through the class hierarchy via normal Python MRO. A subclass can override it:
+
+```python
+class Order(Base, UUIDMixin):
+    __watched_fields__ = ("status",)
     ...
 
 class UrgentOrder(Order):
-    # inherits @watch("status") — on_update fires only for status changes
+    # inherits __watched_fields__ = ("status",)
     ...
 
-@watch("priority")
 class PriorityOrder(Order):
-    # overrides parent — on_update fires only for priority changes
+    __watched_fields__ = ("priority",)
+    # overrides parent — UPDATE fires only for priority changes
     ...
 ```
 
-#### Option 1 — catch-all with `on_event`
+### Registering handlers
 
-Override `on_event` to handle all event types in one place. The specific methods delegate here by default:
+Register handlers with the [`listens_for`](../reference/models.md#fastapi_toolsets.models.listens_for) decorator. Every callback receives three arguments: the model instance, the [`ModelEvent`](../reference/models.md#fastapi_toolsets.models.ModelEvent) that triggered it, and a `changes` dict (`None` for `CREATE` and `DELETE`):
 
 ```python
-from fastapi_toolsets.models import ModelEvent, UUIDMixin, WatchedFieldsMixin, watch
+from fastapi_toolsets.models import ModelEvent, UUIDMixin, listens_for
 
-@watch("status")
-class Order(Base, UUIDMixin, WatchedFieldsMixin):
+class Order(Base, UUIDMixin):
     __tablename__ = "orders"
+    __watched_fields__ = ("status",)
 
     status: Mapped[str]
 
-    async def on_event(self, event: ModelEvent, changes: dict | None = None) -> None:
-        if event == ModelEvent.CREATE:
-            await notify_new_order(self.id)
-        elif event == ModelEvent.DELETE:
-            await notify_order_cancelled(self.id)
-        elif event == ModelEvent.UPDATE:
-            await notify_status_change(self.id, changes["status"])
-```
-
-#### Option 2 — targeted overrides
+@listens_for(Order, [ModelEvent.CREATE])
+async def on_order_created(order: Order, event_type: ModelEvent, changes: None):
+    await notify_new_order(order.id)
 
-Override individual methods for more focused logic:
+@listens_for(Order, [ModelEvent.DELETE])
+async def on_order_deleted(order: Order, event_type: ModelEvent, changes: None):
+    await notify_order_cancelled(order.id)
 
-```python
-@watch("status")
-class Order(Base, UUIDMixin, WatchedFieldsMixin):
-    __tablename__ = "orders"
-
-    status: Mapped[str]
-
-    async def on_create(self) -> None:
-        await notify_new_order(self.id)
-
-    async def on_delete(self) -> None:
-        await notify_order_cancelled(self.id)
-
-    async def on_update(self, changes: dict) -> None:
-        if "status" in changes:
-            old = changes["status"]["old"]
-            new = changes["status"]["new"]
-            await notify_status_change(self.id, old, new)
+@listens_for(Order, [ModelEvent.UPDATE])
+async def on_order_updated(order: Order, event_type: ModelEvent, changes: dict):
+    if "status" in changes:
+        await notify_status_change(order.id, changes["status"])
 ```
 
-#### Field changes format
+Multiple handlers can be registered for the same model and event. Handlers registered on a parent class also fire for subclass instances.
 
-The `changes` dict maps each watched field that changed to `{"old": ..., "new": ...}`. Only fields that actually changed are included:
+A single handler can listen for multiple events at once. When `event_types` is omitted, the handler fires for all events:
 
 ```python
+@listens_for(Order, [ModelEvent.CREATE, ModelEvent.UPDATE])
+async def on_order_changed(order: Order, event_type: ModelEvent, changes: dict | None):
+    await invalidate_cache(order.id)
+
+@listens_for(Order)  # all events
+async def on_any_order_event(order: Order, event_type: ModelEvent, changes: dict | None):
+    await audit_log(order.id, event_type)
+```
+
+### Field changes format
+
+The `changes` dict maps each watched field that changed to `{"old": ..., "new": ...}`. Only fields that actually changed are included. For `CREATE` and `DELETE` events, `changes` is `None`:
+
+```python
+# CREATE / DELETE → changes is None
 # status changed → {"status": {"old": "pending", "new": "shipped"}}
 # two fields changed → {"status": {...}, "assigned_to": {...}}
 ```
 
 !!! info "Multiple flushes in one transaction are merged: the earliest `old` and latest `new` are preserved, and `on_update` fires only once per commit."
 
-!!! warning "Callbacks fire only for ORM-level changes. Rows updated via raw SQL (`UPDATE ... SET ...`) are not detected."
-
-!!! warning "Callbacks fire when the **outermost active context** (savepoint or transaction) commits."
-    If you create several related objects using `CrudFactory.create` and need
-    callbacks to see all of them (including associations), wrap the whole
-    operation in a single [`get_transaction`](db.md) or [`lock_tables`](db.md)
-    block. Without it, each `create` call commits its own savepoint and
-    `on_create` fires before the remaining objects exist.
-
-```python
-from fastapi_toolsets.db import get_transaction
-
-async with get_transaction(session):
-    order = await OrderCrud.create(session, order_data)
-    item = await ItemCrud.create(session, item_data)
-    await session.refresh(order, attribute_names=["items"])
-    order.items.append(item)
-# on_create fires here for both order and item,
-# with the full association already committed.
-```
-
-## Composing mixins
-
-All mixins can be combined in any order. The only constraint is that exactly one primary key must be defined — either via `UUIDMixin` or directly on the model.
-
-```python
-from fastapi_toolsets.models import UUIDMixin, TimestampMixin
-
-class Event(Base, UUIDMixin, TimestampMixin):
-    __tablename__ = "events"
-    name: Mapped[str]
-
-class Counter(Base, UpdatedAtMixin):
-    __tablename__ = "counters"
-    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
-    value: Mapped[int]
-```
-
 ---
 
 [:material-api: API Reference](../reference/models.md)
````
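A small sketch of the rollback guarantee stated in the new lifecycle section ("if the transaction rolls back, no callback fires"); `SessionLocal` and `Order` follow the setup shown in the diff above and are otherwise illustrative:

```python
async with SessionLocal() as session:
    session.add(Order(status="pending"))
    await session.rollback()  # nothing committed, so no CREATE callback fires

async with SessionLocal() as session:
    session.add(Order(status="pending"))
    await session.commit()    # CREATE callbacks dispatch here, after commit
```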
docs/module/security.md (new file, 267 lines)
@@ -0,0 +1,267 @@
|
|||||||
|
# Security
|
||||||
|
|
||||||
|
Composable authentication helpers for FastAPI that use `Security()` for OpenAPI documentation and accept user-provided validator functions with full type flexibility.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The `security` module provides four auth source classes and a `MultiAuth` factory. Each class wraps a FastAPI security scheme for OpenAPI and accepts a validator function called as:
|
||||||
|
|
||||||
|
```python
|
||||||
|
await validator(credential, **kwargs)
|
||||||
|
```
|
||||||
|
|
||||||
|
where `kwargs` are the extra keyword arguments provided at instantiation (roles, permissions, enums, etc.). The validator returns the authenticated identity (e.g. a `User` model) which becomes the route dependency value.
|
||||||
|
|
||||||
|
```python
|
||||||
|
from fastapi import Security
|
||||||
|
from fastapi_toolsets.security import BearerTokenAuth
|
||||||
|
|
||||||
|
async def verify_token(token: str, *, role: str) -> User:
|
||||||
|
user = await db.get_by_token(token)
|
||||||
|
if not user or user.role != role:
|
||||||
|
raise UnauthorizedError()
|
||||||
|
return user
|
||||||
|
|
||||||
|
bearer_admin = BearerTokenAuth(verify_token, role="admin")
|
||||||
|
|
||||||
|
@app.get("/admin")
|
||||||
|
async def admin_route(user: User = Security(bearer_admin)):
|
||||||
|
return user
|
||||||
|
```
|
||||||
|
|
||||||
|
## Auth sources
|
||||||
|
|
||||||
|
### [`BearerTokenAuth`](../reference/security.md#fastapi_toolsets.security.BearerTokenAuth)
|
||||||
|
|
||||||
|
Reads the `Authorization: Bearer <token>` header. Wraps `HTTPBearer` for OpenAPI.
|
||||||
|
|
||||||
|
```python
|
||||||
|
from fastapi_toolsets.security import BearerTokenAuth
|
||||||
|
|
||||||
|
bearer = BearerTokenAuth(validator=verify_token)
|
||||||
|
|
||||||
|
@app.get("/me")
|
||||||
|
async def me(user: User = Security(bearer)):
|
||||||
|
return user
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Token prefix
|
||||||
|
|
||||||
|
The optional `prefix` parameter restricts a `BearerTokenAuth` instance to tokens
|
||||||
|
that start with a given string. The prefix is **kept** in the value passed to the
|
||||||
|
validator — store and compare tokens with their prefix included.
|
||||||
|
|
||||||
|
This lets you deploy multiple `BearerTokenAuth` instances in the same application
|
||||||
|
and disambiguate them efficiently in `MultiAuth`:
|
||||||
|
|
||||||
|
```python
|
||||||
|
user_bearer = BearerTokenAuth(verify_user, prefix="user_") # matches "Bearer user_..."
|
||||||
|
org_bearer = BearerTokenAuth(verify_org, prefix="org_") # matches "Bearer org_..."
|
||||||
|
```
|
||||||
|
|
||||||
|
Use [`generate_token()`](#token-generation) to create correctly-prefixed tokens.
|
||||||
|
|
||||||
|
#### Token generation
|
||||||
|
|
||||||
|
`BearerTokenAuth.generate_token()` produces a secure random token ready to store
|
||||||
|
in your database and return to the client. If a prefix is configured it is
|
||||||
|
prepended automatically:
|
||||||
|
|
||||||
|
```python
|
||||||
|
bearer = BearerTokenAuth(verify_token, prefix="user_")
|
||||||
|
|
||||||
|
token = bearer.generate_token() # e.g. "user_Xk3mN..."
|
||||||
|
await db.store_token(user_id, token)
|
||||||
|
return {"access_token": token, "token_type": "bearer"}
|
||||||
|
```
|
||||||
|
|
||||||
|
The client sends `Authorization: Bearer user_Xk3mN...` and the validator receives
|
||||||
|
the full token (prefix included) to compare against the stored value.
|
||||||
|
|
||||||
|
### [`CookieAuth`](../reference/security.md#fastapi_toolsets.security.CookieAuth)

Reads a named cookie. Wraps `APIKeyCookie` for OpenAPI.

```python
from fastapi_toolsets.security import CookieAuth

cookie_auth = CookieAuth("session", validator=verify_session)


@app.get("/me")
async def me(user: User = Security(cookie_auth)):
    return user
```
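`CookieAuth` pairs naturally with login and logout routes that set and clear the cookie. The example application in this changeset does this with the instance's `set_cookie()` and `delete_cookie()` helpers; a minimal sketch (the `authenticate` helper is hypothetical):

```python
from fastapi import Response


@app.post("/login", status_code=204)
async def login(response: Response, username: str, password: str):
    user = await authenticate(username, password)  # hypothetical helper
    # Write the session cookie onto the outgoing response
    cookie_auth.set_cookie(response, str(user.id))


@app.post("/logout", status_code=204)
async def logout(response: Response):
    cookie_auth.delete_cookie(response)
```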
### [`OAuth2Auth`](../reference/security.md#fastapi_toolsets.security.OAuth2Auth)

Reads the `Authorization: Bearer <token>` header and registers the token endpoint in OpenAPI via `OAuth2PasswordBearer`.

```python
from fastapi_toolsets.security import OAuth2Auth

oauth2_auth = OAuth2Auth(token_url="/token", validator=verify_token)


@app.get("/me")
async def me(user: User = Security(oauth2_auth)):
    return user
```
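The token endpoint itself is yours to implement; `OAuth2Auth` only advertises it so the Swagger UI "Authorize" dialog knows where to post credentials. A minimal password-flow sketch using FastAPI's standard form helper (`authenticate` and `issue_access_token` are hypothetical):

```python
from fastapi import Depends
from fastapi.security import OAuth2PasswordRequestForm


@app.post("/token")
async def token(form: OAuth2PasswordRequestForm = Depends()):
    user = await authenticate(form.username, form.password)  # hypothetical helper
    if not user:
        raise UnauthorizedError()
    access_token = await issue_access_token(user)  # hypothetical helper
    # Swagger's "Authorize" dialog expects exactly this response shape
    return {"access_token": access_token, "token_type": "bearer"}
```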
### [`OpenIDAuth`](../reference/security.md#fastapi_toolsets.security.OpenIDAuth)

Reads the `Authorization: Bearer <token>` header and registers the OpenID Connect discovery URL in OpenAPI via `OpenIdConnect`. Token validation is fully delegated to your validator — use any OIDC / JWT library (`authlib`, `python-jose`, `PyJWT`).

```python
from fastapi_toolsets.security import OpenIDAuth


async def verify_google_token(token: str, *, audience: str) -> User:
    payload = jwt.decode(
        token, google_public_keys, algorithms=["RS256"], audience=audience
    )
    return User(email=payload["email"], name=payload["name"])


google_auth = OpenIDAuth(
    "https://accounts.google.com/.well-known/openid-configuration",
    verify_google_token,
    audience="my-client-id",
)


@app.get("/me")
async def me(user: User = Security(google_auth)):
    return user
```

The discovery URL is used **only for OpenAPI documentation** — no requests are made to it by this class. You are responsible for fetching and caching the provider's public keys in your validator.
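With PyJWT, for example, `PyJWKClient` fetches and caches signing keys from the provider's `jwks_uri` (listed in the discovery document; the URL below is Google's). A sketch:

```python
import jwt  # PyJWT

# Caches fetched keys between calls; the first call blocks on an HTTP fetch
jwks_client = jwt.PyJWKClient("https://www.googleapis.com/oauth2/v3/certs")


async def verify_google_token(token: str, *, audience: str) -> User:
    signing_key = jwks_client.get_signing_key_from_jwt(token)
    payload = jwt.decode(
        token,
        signing_key.key,
        algorithms=["RS256"],
        audience=audience,
        issuer="https://accounts.google.com",
    )
    return User(email=payload["email"], name=payload["name"])
```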
Multiple providers work naturally with `MultiAuth`:

```python
multi = MultiAuth(google_auth, github_auth)


@app.get("/data")
async def data(user: User = Security(multi)):
    return user
```

## Typed validator kwargs

All auth classes forward extra instantiation keyword arguments to the validator. Arguments can be any type — enums, strings, integers, etc. The validator returns the authenticated identity, which FastAPI injects directly into the route handler.

```python
async def verify_token(token: str, *, role: Role, permission: str) -> User:
    user = await decode_token(token)
    if user.role != role or permission not in user.permissions:
        raise UnauthorizedError()
    return user


bearer = BearerTokenAuth(verify_token, role=Role.ADMIN, permission="billing:read")
```

Each auth instance is self-contained — create a separate instance per distinct requirement instead of passing requirements through `Security(scopes=[...])`.
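For example, two role requirements become two instances of the same source (the `.require()` shortcut shown next avoids the extra module-level names):

```python
bearer_user = BearerTokenAuth(verify_token, role=Role.USER)
bearer_admin = BearerTokenAuth(verify_token, role=Role.ADMIN)
```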
### Using `.require()` inline

If declaring a new top-level variable per role feels verbose, use `.require()` to create a configured clone directly in the route decorator. The original instance is not mutated:

```python
bearer = BearerTokenAuth(verify_token)


@app.get("/admin/stats")
async def admin_stats(user: User = Security(bearer.require(role=Role.ADMIN))):
    return {"message": f"Hello admin {user.name}"}


@app.get("/profile")
async def profile(user: User = Security(bearer.require(role=Role.USER))):
    return {"id": user.id, "name": user.name}
```

`.require()` kwargs are merged over existing ones — new values win on conflict. The `prefix` (for `BearerTokenAuth`) and cookie name (for `CookieAuth`) are always preserved.
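For instance, a clone can override one requirement while inheriting the rest (`Role` and `verify_token` as in the earlier examples):

```python
bearer = BearerTokenAuth(verify_token, role=Role.USER, permission="billing:read")

# role is overridden for this clone; permission carries over unchanged
admin_billing = bearer.require(role=Role.ADMIN)
```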
`.require()` instances work transparently inside `MultiAuth`:

```python
multi = MultiAuth(
    user_bearer.require(role=Role.USER),
    org_bearer.require(role=Role.ADMIN),
)
```

## MultiAuth

[`MultiAuth`](../reference/security.md#fastapi_toolsets.security.MultiAuth) combines multiple auth sources into a single callable. Sources are tried in order; the first one that finds a credential wins.

```python
from fastapi_toolsets.security import MultiAuth

multi = MultiAuth(user_bearer, org_bearer, cookie_auth)


@app.get("/data")
async def data_route(user=Security(multi)):
    return user
```

### Using `.require()` on MultiAuth

`MultiAuth` also supports `.require()`, which propagates the kwargs to every source that implements it. Sources that do not (e.g. custom `AuthSource` subclasses) are passed through unchanged:

```python
multi = MultiAuth(bearer, cookie)


@app.get("/admin")
async def admin(user: User = Security(multi.require(role=Role.ADMIN))):
    return user
```

This is equivalent to calling `.require()` on each source individually:

```python
# These two are identical
multi.require(role=Role.ADMIN)

MultiAuth(
    bearer.require(role=Role.ADMIN),
    cookie.require(role=Role.ADMIN),
)
```

### Prefix-based dispatch

Because `extract()` is pure string matching (no I/O), prefix-based source selection is essentially free. Only the matching source's validator (which may involve DB or network I/O) is ever called:

```python
user_bearer = BearerTokenAuth(verify_user, prefix="user_")
org_bearer = BearerTokenAuth(verify_org, prefix="org_")

multi = MultiAuth(user_bearer, org_bearer)

# "Bearer user_alice" → only verify_user runs, receives "user_alice"
# "Bearer org_acme"  → only verify_org runs, receives "org_acme"
```

Tokens are stored and compared **with their prefix** — use `generate_token()` on each source to issue correctly-prefixed tokens:

```python
user_token = user_bearer.generate_token()  # "user_..."
org_token = org_bearer.generate_token()  # "org_..."
```

---

[:material-api: API Reference](../reference/security.md)
@@ -6,17 +6,19 @@ You can import them directly from `fastapi_toolsets.models`:

 ```python
 from fastapi_toolsets.models import (
+    EventSession,
     ModelEvent,
     UUIDMixin,
     UUIDv7Mixin,
     CreatedAtMixin,
     UpdatedAtMixin,
     TimestampMixin,
-    WatchedFieldsMixin,
+    listens_for,
+    watch,
 )
 ```

+## ::: fastapi_toolsets.models.EventSession
+
 ## ::: fastapi_toolsets.models.ModelEvent

 ## ::: fastapi_toolsets.models.UUIDMixin
@@ -29,6 +31,4 @@ from fastapi_toolsets.models import (

 ## ::: fastapi_toolsets.models.TimestampMixin

-## ::: fastapi_toolsets.models.WatchedFieldsMixin
+## ::: fastapi_toolsets.models.listens_for
+
+## ::: fastapi_toolsets.models.watch
docs/reference/security.md (new file, 28 lines)
@@ -0,0 +1,28 @@
# `security`

Here's the reference for the authentication helpers provided by the `security` module.

You can import them directly from `fastapi_toolsets.security`:

```python
from fastapi_toolsets.security import (
    AuthSource,
    BearerTokenAuth,
    CookieAuth,
    OAuth2Auth,
    OpenIDAuth,
    MultiAuth,
)
```

## ::: fastapi_toolsets.security.AuthSource

## ::: fastapi_toolsets.security.BearerTokenAuth

## ::: fastapi_toolsets.security.CookieAuth

## ::: fastapi_toolsets.security.OAuth2Auth

## ::: fastapi_toolsets.security.OpenIDAuth

## ::: fastapi_toolsets.security.MultiAuth
docs_src/examples/authentication/__init__.py (new file, 0 lines)

docs_src/examples/authentication/app.py (new file, 9 lines)
@@ -0,0 +1,9 @@
from fastapi import FastAPI

from fastapi_toolsets.exceptions import init_exceptions_handlers

from .routes import router

app = FastAPI()
init_exceptions_handlers(app=app)
app.include_router(router=router)
docs_src/examples/authentication/crud.py (new file, 9 lines)
@@ -0,0 +1,9 @@
from fastapi_toolsets.crud import CrudFactory

from .models import OAuthAccount, OAuthProvider, Team, User, UserToken

TeamCrud = CrudFactory(model=Team)
UserCrud = CrudFactory(model=User)
UserTokenCrud = CrudFactory(model=UserToken)
OAuthProviderCrud = CrudFactory(model=OAuthProvider)
OAuthAccountCrud = CrudFactory(model=OAuthAccount)
docs_src/examples/authentication/db.py (new file, 15 lines)
@@ -0,0 +1,15 @@
from fastapi import Depends
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

from fastapi_toolsets.db import create_db_context, create_db_dependency

DATABASE_URL = "postgresql+asyncpg://postgres:postgres@localhost:5432/postgres"

engine = create_async_engine(url=DATABASE_URL, future=True)
async_session_maker = async_sessionmaker(bind=engine, expire_on_commit=False)

get_db = create_db_dependency(session_maker=async_session_maker)
get_db_context = create_db_context(session_maker=async_session_maker)


SessionDep = Depends(get_db)
docs_src/examples/authentication/models.py (new file, 105 lines)
@@ -0,0 +1,105 @@
import enum
from datetime import datetime
from uuid import UUID

from sqlalchemy import (
    Boolean,
    DateTime,
    Enum,
    ForeignKey,
    Integer,
    String,
    UniqueConstraint,
)
from sqlalchemy.dialects.postgresql import UUID as PG_UUID
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship

from fastapi_toolsets.models import TimestampMixin, UUIDMixin


class Base(DeclarativeBase, UUIDMixin):
    type_annotation_map = {
        str: String(),
        int: Integer(),
        UUID: PG_UUID(as_uuid=True),
        datetime: DateTime(timezone=True),
    }


class UserRole(enum.Enum):
    admin = "admin"
    moderator = "moderator"
    user = "user"


class Team(Base, TimestampMixin):
    __tablename__ = "teams"

    name: Mapped[str] = mapped_column(String, unique=True, index=True)
    users: Mapped[list["User"]] = relationship(back_populates="team")


class User(Base, TimestampMixin):
    __tablename__ = "users"

    username: Mapped[str] = mapped_column(String, unique=True, index=True)
    email: Mapped[str | None] = mapped_column(
        String, unique=True, index=True, nullable=True
    )
    hashed_password: Mapped[str | None] = mapped_column(String, nullable=True)
    is_active: Mapped[bool] = mapped_column(Boolean, default=True)
    role: Mapped[UserRole] = mapped_column(Enum(UserRole), default=UserRole.user)

    team_id: Mapped[UUID | None] = mapped_column(ForeignKey("teams.id"), nullable=True)
    team: Mapped["Team | None"] = relationship(back_populates="users")
    oauth_accounts: Mapped[list["OAuthAccount"]] = relationship(back_populates="user")
    tokens: Mapped[list["UserToken"]] = relationship(back_populates="user")


class UserToken(Base, TimestampMixin):
    """API tokens for a user (multiple allowed)."""

    __tablename__ = "user_tokens"

    user_id: Mapped[UUID] = mapped_column(ForeignKey("users.id"))
    # Store hashed token value
    token_hash: Mapped[str] = mapped_column(String, unique=True, index=True)
    name: Mapped[str | None] = mapped_column(String, nullable=True)
    expires_at: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True), nullable=True
    )

    user: Mapped["User"] = relationship(back_populates="tokens")


class OAuthProvider(Base, TimestampMixin):
    """Configurable OAuth2 / OpenID Connect provider."""

    __tablename__ = "oauth_providers"

    slug: Mapped[str] = mapped_column(String, unique=True, index=True)
    name: Mapped[str] = mapped_column(String)
    client_id: Mapped[str] = mapped_column(String)
    client_secret: Mapped[str] = mapped_column(String)
    discovery_url: Mapped[str] = mapped_column(String, nullable=False)
    scopes: Mapped[str] = mapped_column(String, default="openid email profile")
    is_active: Mapped[bool] = mapped_column(Boolean, default=True)

    accounts: Mapped[list["OAuthAccount"]] = relationship(back_populates="provider")


class OAuthAccount(Base, TimestampMixin):
    """OAuth2 / OpenID Connect account linked to a user."""

    __tablename__ = "oauth_accounts"
    __table_args__ = (
        UniqueConstraint("provider_id", "subject", name="uq_oauth_provider_subject"),
    )

    user_id: Mapped[UUID] = mapped_column(ForeignKey("users.id"))
    provider_id: Mapped[UUID] = mapped_column(ForeignKey("oauth_providers.id"))
    # OAuth `sub` / OpenID subject identifier
    subject: Mapped[str] = mapped_column(String)

    user: Mapped["User"] = relationship(back_populates="oauth_accounts")
    provider: Mapped["OAuthProvider"] = relationship(back_populates="accounts")
docs_src/examples/authentication/routes.py (new file, 122 lines)
@@ -0,0 +1,122 @@
from typing import Annotated
from uuid import UUID

import bcrypt
from fastapi import APIRouter, Form, HTTPException, Response, Security

from fastapi_toolsets.dependencies import PathDependency

from .crud import UserCrud, UserTokenCrud
from .db import SessionDep
from .models import OAuthProvider, User, UserToken
from .schemas import (
    ApiTokenCreateRequest,
    ApiTokenResponse,
    RegisterRequest,
    UserCreate,
    UserResponse,
)
from .security import auth, cookie_auth, create_api_token

ProviderDep = PathDependency(
    model=OAuthProvider,
    field=OAuthProvider.slug,
    session_dep=SessionDep,
    param_name="slug",
)


def hash_password(password: str) -> str:
    return bcrypt.hashpw(password.encode(), bcrypt.gensalt()).decode()


def verify_password(plain: str, hashed: str) -> bool:
    return bcrypt.checkpw(plain.encode(), hashed.encode())


router = APIRouter(prefix="/auth")


@router.post("/register", response_model=UserResponse, status_code=201)
async def register(body: RegisterRequest, session: SessionDep):
    existing = await UserCrud.first(
        session=session, filters=[User.username == body.username]
    )
    if existing:
        raise HTTPException(status_code=409, detail="Username already taken")

    user = await UserCrud.create(
        session=session,
        obj=UserCreate(
            username=body.username,
            email=body.email,
            hashed_password=hash_password(body.password),
        ),
    )
    return user


@router.post("/token", status_code=204)
async def login(
    session: SessionDep,
    response: Response,
    username: Annotated[str, Form()],
    password: Annotated[str, Form()],
):
    user = await UserCrud.first(session=session, filters=[User.username == username])

    if (
        not user
        or not user.hashed_password
        or not verify_password(password, user.hashed_password)
    ):
        raise HTTPException(status_code=401, detail="Invalid credentials")

    if not user.is_active:
        raise HTTPException(status_code=403, detail="Account disabled")

    cookie_auth.set_cookie(response, str(user.id))


@router.post("/logout", status_code=204)
async def logout(response: Response):
    cookie_auth.delete_cookie(response)


@router.get("/me", response_model=UserResponse)
async def me(user: User = Security(auth)):
    return user


@router.post("/tokens", response_model=ApiTokenResponse, status_code=201)
async def create_token(
    body: ApiTokenCreateRequest,
    user: User = Security(auth),
):
    raw, token_row = await create_api_token(
        user.id, name=body.name, expires_at=body.expires_at
    )
    return ApiTokenResponse(
        id=token_row.id,
        name=token_row.name,
        expires_at=token_row.expires_at,
        created_at=token_row.created_at,
        token=raw,
    )


@router.delete("/tokens/{token_id}", status_code=204)
async def revoke_token(
    session: SessionDep,
    token_id: UUID,
    user: User = Security(auth),
):
    if not await UserTokenCrud.first(
        session=session,
        filters=[UserToken.id == token_id, UserToken.user_id == user.id],
    ):
        raise HTTPException(status_code=404, detail="Token not found")
    await UserTokenCrud.delete(
        session=session,
        filters=[UserToken.id == token_id, UserToken.user_id == user.id],
    )
docs_src/examples/authentication/schemas.py (new file, 64 lines)
@@ -0,0 +1,64 @@
from datetime import datetime
from uuid import UUID

from pydantic import EmailStr

from fastapi_toolsets.schemas import PydanticBase


class RegisterRequest(PydanticBase):
    username: str
    password: str
    email: EmailStr | None = None


class UserResponse(PydanticBase):
    id: UUID
    username: str
    email: str | None
    role: str
    is_active: bool

    model_config = {"from_attributes": True}


class ApiTokenCreateRequest(PydanticBase):
    name: str | None = None
    expires_at: datetime | None = None


class ApiTokenResponse(PydanticBase):
    id: UUID
    name: str | None
    expires_at: datetime | None
    created_at: datetime
    # Only populated on creation
    token: str | None = None

    model_config = {"from_attributes": True}


class OAuthProviderResponse(PydanticBase):
    slug: str
    name: str

    model_config = {"from_attributes": True}


class UserCreate(PydanticBase):
    username: str
    email: str | None = None
    hashed_password: str | None = None


class UserTokenCreate(PydanticBase):
    user_id: UUID
    token_hash: str
    name: str | None = None
    expires_at: datetime | None = None


class OAuthAccountCreate(PydanticBase):
    user_id: UUID
    provider_id: UUID
    subject: str
docs_src/examples/authentication/security.py (new file, 100 lines)
@@ -0,0 +1,100 @@
import hashlib
from datetime import datetime, timezone
from uuid import UUID

from fastapi import HTTPException
from sqlalchemy.orm import selectinload

from fastapi_toolsets.exceptions import UnauthorizedError
from fastapi_toolsets.security import (
    APIKeyHeaderAuth,
    BearerTokenAuth,
    CookieAuth,
    MultiAuth,
)

from .crud import UserCrud, UserTokenCrud
from .db import get_db_context
from .models import User, UserRole, UserToken
from .schemas import UserTokenCreate

SESSION_COOKIE = "session"
SECRET_KEY = "123456789"


def _hash_token(token: str) -> str:
    return hashlib.sha256(token.encode()).hexdigest()


async def _verify_token(token: str, role: UserRole | None = None) -> User:
    async with get_db_context() as db:
        user_token = await UserTokenCrud.first(
            session=db,
            filters=[UserToken.token_hash == _hash_token(token)],
            load_options=[selectinload(UserToken.user)],
        )

        if user_token is None or not user_token.user.is_active:
            raise UnauthorizedError()

        if user_token.expires_at and user_token.expires_at < datetime.now(timezone.utc):
            raise UnauthorizedError()

        user = user_token.user

        if role is not None and user.role != role:
            raise HTTPException(status_code=403, detail="Insufficient permissions")

        return user


async def _verify_cookie(user_id: str, role: UserRole | None = None) -> User:
    async with get_db_context() as db:
        user = await UserCrud.first(
            session=db,
            filters=[User.id == UUID(user_id)],
        )

        if not user or not user.is_active:
            raise UnauthorizedError()

        if role is not None and user.role != role:
            raise HTTPException(status_code=403, detail="Insufficient permissions")

        return user


bearer_auth = BearerTokenAuth(
    validator=_verify_token,
    prefix="ctf_",
)
header_auth = APIKeyHeaderAuth(
    name="X-API-Key",
    validator=_verify_token,
)
cookie_auth = CookieAuth(
    name=SESSION_COOKIE,
    validator=_verify_cookie,
    secret_key=SECRET_KEY,
)
auth = MultiAuth(bearer_auth, header_auth, cookie_auth)


async def create_api_token(
    user_id: UUID,
    *,
    name: str | None = None,
    expires_at: datetime | None = None,
) -> tuple[str, UserToken]:
    raw = bearer_auth.generate_token()
    async with get_db_context() as db:
        token_row = await UserTokenCrud.create(
            session=db,
            obj=UserTokenCreate(
                user_id=user_id,
                token_hash=_hash_token(raw),
                name=name,
                expires_at=expires_at,
            ),
        )
    return raw, token_row
@@ -2,7 +2,6 @@ from typing import Annotated

 from fastapi import APIRouter, Depends

-from fastapi_toolsets.crud import OrderByClause
 from fastapi_toolsets.schemas import (
     CursorPaginatedResponse,
     OffsetPaginatedResponse,
@@ -22,21 +21,18 @@ async def list_articles_offset(
     session: SessionDep,
     params: Annotated[
         dict,
-        Depends(ArticleCrud.offset_params(default_page_size=20, max_page_size=100)),
+        Depends(
+            ArticleCrud.offset_paginate_params(
+                default_page_size=20,
+                max_page_size=100,
+                default_order_field=Article.created_at,
+            )
+        ),
     ],
-    filter_by: Annotated[dict[str, list[str]], Depends(ArticleCrud.filter_params())],
-    order_by: Annotated[
-        OrderByClause | None,
-        Depends(ArticleCrud.order_params(default_field=Article.created_at)),
-    ],
-    search: str | None = None,
 ) -> OffsetPaginatedResponse[ArticleRead]:
     return await ArticleCrud.offset_paginate(
         session=session,
         **params,
-        search=search,
-        filter_by=filter_by or None,
-        order_by=order_by,
         schema=ArticleRead,
     )
@@ -46,21 +42,18 @@ async def list_articles_cursor(
     session: SessionDep,
     params: Annotated[
         dict,
-        Depends(ArticleCrud.cursor_params(default_page_size=20, max_page_size=100)),
+        Depends(
+            ArticleCrud.cursor_paginate_params(
+                default_page_size=20,
+                max_page_size=100,
+                default_order_field=Article.created_at,
+            )
+        ),
     ],
-    filter_by: Annotated[dict[str, list[str]], Depends(ArticleCrud.filter_params())],
-    order_by: Annotated[
-        OrderByClause | None,
-        Depends(ArticleCrud.order_params(default_field=Article.created_at)),
-    ],
-    search: str | None = None,
 ) -> CursorPaginatedResponse[ArticleRead]:
     return await ArticleCrud.cursor_paginate(
         session=session,
         **params,
-        search=search,
-        filter_by=filter_by or None,
-        order_by=order_by,
         schema=ArticleRead,
     )
@@ -70,20 +63,17 @@ async def list_articles(
     session: SessionDep,
     params: Annotated[
         dict,
-        Depends(ArticleCrud.paginate_params(default_page_size=20, max_page_size=100)),
+        Depends(
+            ArticleCrud.paginate_params(
+                default_page_size=20,
+                max_page_size=100,
+                default_order_field=Article.created_at,
+            )
+        ),
     ],
-    filter_by: Annotated[dict[str, list[str]], Depends(ArticleCrud.filter_params())],
-    order_by: Annotated[
-        OrderByClause | None,
-        Depends(ArticleCrud.order_params(default_field=Article.created_at)),
-    ],
-    search: str | None = None,
 ) -> PaginatedResponse[ArticleRead]:
     return await ArticleCrud.paginate(
         session,
         **params,
-        search=search,
-        filter_by=filter_by or None,
-        order_by=order_by,
         schema=ArticleRead,
     )
@@ -66,6 +66,7 @@ manager = "fastapi_toolsets.cli.app:cli"
 dev = [
     {include-group = "tests"},
     {include-group = "docs"},
+    {include-group = "docs-src"},
     "fastapi-toolsets[all]",
     "prek>=0.3.8",
     "ruff>=0.1.0",
@@ -80,8 +81,12 @@ tests = [
     "pytest>=8.0.0",
 ]
 docs = [
+    "mike",
     "mkdocstrings-python>=2.0.2",
-    "zensical>=0.0.23",
+    "zensical>=0.0.30",
+]
+
+docs-src = [
+    "bcrypt>=4.0.0",
 ]

 [build-system]
@@ -104,3 +109,6 @@ exclude_lines = [
     "if TYPE_CHECKING:",
     "raise NotImplementedError",
 ]
+
+[tool.uv.sources]
+mike = { git = "https://github.com/squidfunk/mike.git", tag = "2.2.0+zensical-0.1.0" }
@@ -1,6 +1,11 @@
 """Generic async CRUD operations for SQLAlchemy models."""

-from ..exceptions import InvalidFacetFilterError, NoSearchableFieldsError
+from ..exceptions import (
+    InvalidFacetFilterError,
+    InvalidSearchColumnError,
+    NoSearchableFieldsError,
+    UnsupportedFacetTypeError,
+)
 from ..schemas import PaginationType
 from ..types import (
     FacetFieldType,
@@ -18,6 +23,7 @@ __all__ = [
     "FacetFieldType",
     "get_searchable_fields",
     "InvalidFacetFilterError",
+    "InvalidSearchColumnError",
     "JoinType",
     "M2MFieldType",
     "NoSearchableFieldsError",
@@ -25,4 +31,5 @@ __all__ = [
     "PaginationType",
     "SearchConfig",
     "SearchFieldType",
+    "UnsupportedFacetTypeError",
 ]
@@ -47,6 +47,7 @@ from .search import (
     build_filter_by,
     build_search_filters,
     facet_keys,
+    search_field_keys,
 )

@@ -263,118 +264,285 @@ class AsyncCrud(Generic[ModelType]):
         )

     @classmethod
-    def filter_params(
+    def _resolve_search_columns(
+        cls: type[Self],
+        search_fields: Sequence[SearchFieldType] | None,
+    ) -> list[str] | None:
+        """Return search column keys, or None if no searchable fields configured."""
+        fields = search_fields if search_fields is not None else cls.searchable_fields
+        if not fields:
+            return None
+        return search_field_keys(fields)
+
+    @classmethod
+    def _build_paginate_params(
         cls: type[Self],
         *,
-        facet_fields: Sequence[FacetFieldType] | None = None,
-    ) -> Callable[..., Awaitable[dict[str, list[str]]]]:
-        """Return a FastAPI dependency that collects facet filter values from query parameters.
-
-        Args:
-            facet_fields: Override the facet fields for this dependency. Falls back to the
-                class-level ``facet_fields`` if not provided.
-
-        Returns:
-            An async dependency function named ``{Model}FilterParams`` that resolves to a
-            ``dict[str, list[str]]`` containing only the keys that were supplied in the
-            request (absent/``None`` parameters are excluded).
-
-        Raises:
-            ValueError: If no facet fields are configured on this CRUD class and none are
-                provided via ``facet_fields``.
-        """
-        fields = cls._resolve_facet_fields(facet_fields)
-        if not fields:
-            raise ValueError(
-                f"{cls.__name__} has no facet_fields configured. "
-                "Pass facet_fields= or set them on CrudFactory."
-            )
-        keys = facet_keys(fields)
-
-        async def dependency(**kwargs: Any) -> dict[str, list[str]]:
-            return {k: v for k, v in kwargs.items() if v is not None}
-
-        dependency.__name__ = f"{cls.model.__name__}FilterParams"
-        dependency.__signature__ = inspect.Signature(  # type: ignore[attr-defined] # ty:ignore[unresolved-attribute]
-            parameters=[
-                inspect.Parameter(
-                    k,
-                    inspect.Parameter.KEYWORD_ONLY,
-                    annotation=list[str] | None,
-                    default=Query(default=None),
-                )
-                for k in keys
-            ]
-        )
-        return dependency
+        pagination_params: list[inspect.Parameter],
+        pagination_fixed: dict[str, Any],
+        dep_name: str,
+        search: bool,
+        filter: bool,
+        order: bool,
+        search_fields: Sequence[SearchFieldType] | None,
+        facet_fields: Sequence[FacetFieldType] | None,
+        order_fields: Sequence[QueryableAttribute[Any]] | None,
+        default_order_field: QueryableAttribute[Any] | None,
+        default_order: Literal["asc", "desc"],
+    ) -> Callable[..., Awaitable[dict[str, Any]]]:
+        """Build a consolidated FastAPI dependency that merges pagination, search, filter, and order params."""
+        all_params: list[inspect.Parameter] = list(pagination_params)
+        pagination_param_names = tuple(p.name for p in pagination_params)
+        reserved_names: set[str] = set(pagination_param_names)
+
+        search_keys: list[str] | None = None
+        if search:
+            search_keys = cls._resolve_search_columns(search_fields)
+            if search_keys:
+                all_params.extend(
+                    [
+                        inspect.Parameter(
+                            "search",
+                            inspect.Parameter.KEYWORD_ONLY,
+                            annotation=str | None,
+                            default=Query(
+                                default=None, description="Search query string"
+                            ),
+                        ),
+                        inspect.Parameter(
+                            "search_column",
+                            inspect.Parameter.KEYWORD_ONLY,
+                            annotation=str | None,
+                            default=Query(
+                                default=None,
+                                description="Restrict search to a single column",
+                                enum=search_keys,
+                            ),
+                        ),
+                    ]
+                )
+                reserved_names.update({"search", "search_column"})
+
+        filter_keys: list[str] | None = None
+        if filter:
+            resolved_facets = cls._resolve_facet_fields(facet_fields)
+            if resolved_facets:
+                filter_keys = facet_keys(resolved_facets)
+                for k in filter_keys:
+                    if k in reserved_names:
+                        raise ValueError(
+                            f"Facet field key {k!r} conflicts with a reserved "
+                            f"parameter name. Reserved names: {sorted(reserved_names)}"
+                        )
+                all_params.extend(
+                    inspect.Parameter(
+                        k,
+                        inspect.Parameter.KEYWORD_ONLY,
+                        annotation=list[str] | None,
+                        default=Query(default=None),
+                    )
+                    for k in filter_keys
+                )
+                reserved_names.update(filter_keys)
+
+        order_field_map: dict[str, QueryableAttribute[Any]] | None = None
+        order_valid_keys: list[str] | None = None
+        if order:
+            resolved_order = (
+                order_fields if order_fields is not None else cls.order_fields
+            )
+            if resolved_order:
+                order_field_map = {f.key: f for f in resolved_order}
+                order_valid_keys = sorted(order_field_map.keys())
+                all_params.extend(
+                    [
+                        inspect.Parameter(
+                            "order_by",
+                            inspect.Parameter.KEYWORD_ONLY,
+                            annotation=str | None,
+                            default=Query(
+                                None,
+                                description=f"Field to order by. Valid values: {order_valid_keys}",
+                                enum=order_valid_keys,
+                            ),
+                        ),
+                        inspect.Parameter(
+                            "order",
+                            inspect.Parameter.KEYWORD_ONLY,
+                            annotation=Literal["asc", "desc"],
+                            default=Query(default_order, description="Sort direction"),
+                        ),
+                    ]
+                )
+
+        async def dependency(**kwargs: Any) -> dict[str, Any]:
+            result: dict[str, Any] = dict(pagination_fixed)
+            for name in pagination_param_names:
+                result[name] = kwargs[name]
+
+            if search_keys is not None:
+                search_val = kwargs.get("search")
+                if search_val is not None:
+                    result["search"] = search_val
+                search_col_val = kwargs.get("search_column")
+                if search_col_val is not None:
+                    result["search_column"] = search_col_val
+
+            if filter_keys is not None:
+                filter_by = {
+                    k: kwargs[k] for k in filter_keys if kwargs.get(k) is not None
+                }
+                result["filter_by"] = filter_by or None
+
+            if order_field_map is not None:
+                order_by_val = kwargs.get("order_by")
+                order_dir = kwargs.get("order", default_order)
+                if order_by_val is None:
+                    field = default_order_field
+                elif order_by_val not in order_field_map:
+                    raise InvalidOrderFieldError(order_by_val, order_valid_keys or [])
+                else:
+                    field = order_field_map[order_by_val]
+                if field is not None:
+                    result["order_by"] = (
+                        field.asc() if order_dir == "asc" else field.desc()
+                    )
+                else:
+                    result["order_by"] = None
+
+            return result
+
+        dependency.__name__ = dep_name
+        dependency.__signature__ = inspect.Signature(  # type: ignore[attr-defined] # ty:ignore[unresolved-attribute]
+            parameters=all_params,
+        )
+        return dependency

     @classmethod
-    def offset_params(
+    def offset_paginate_params(
         cls: type[Self],
         *,
         default_page_size: int = 20,
         max_page_size: int = 100,
         include_total: bool = True,
+        search: bool = True,
+        filter: bool = True,
+        order: bool = True,
+        search_fields: Sequence[SearchFieldType] | None = None,
+        facet_fields: Sequence[FacetFieldType] | None = None,
+        order_fields: Sequence[QueryableAttribute[Any]] | None = None,
+        default_order_field: QueryableAttribute[Any] | None = None,
+        default_order: Literal["asc", "desc"] = "asc",
     ) -> Callable[..., Awaitable[dict[str, Any]]]:
-        """Return a FastAPI dependency that collects offset pagination params from query params.
+        """Return a FastAPI dependency that collects all params for :meth:`offset_paginate`.

         Args:
-            default_page_size: Default value for the ``items_per_page`` query parameter.
-            max_page_size: Maximum allowed value for ``items_per_page`` (enforced via
-                ``le`` on the ``Query``).
-            include_total: Server-side flag forwarded as-is to ``include_total`` in
-                :meth:`offset_paginate`. Not exposed as a query parameter.
+            default_page_size: Default ``items_per_page`` value.
+            max_page_size: Maximum ``items_per_page`` value.
+            include_total: Whether to include total count (not a query param).
+            search: Enable search query parameters.
+            filter: Enable facet filter query parameters.
+            order: Enable order query parameters.
+            search_fields: Override searchable fields.
+            facet_fields: Override facet fields.
+            order_fields: Override order fields.
+            default_order_field: Default field to order by when ``order_by`` is absent.
+            default_order: Default sort direction.

         Returns:
-            An async dependency that resolves to a dict with ``page``,
-            ``items_per_page``, and ``include_total`` keys, ready to be
-            unpacked into :meth:`offset_paginate`.
+            An async dependency that resolves to a dict ready to be unpacked
+            into :meth:`offset_paginate`.
         """
-
-        async def dependency(
-            page: int = Query(1, ge=1, description="Page number (1-indexed)"),
-            items_per_page: int = _page_size_query(default_page_size, max_page_size),
-        ) -> dict[str, Any]:
-            return {
-                "page": page,
-                "items_per_page": items_per_page,
-                "include_total": include_total,
-            }
-
-        dependency.__name__ = f"{cls.model.__name__}OffsetParams"
-        return dependency
+        pagination_params = [
+            inspect.Parameter(
+                "page",
+                inspect.Parameter.KEYWORD_ONLY,
+                annotation=int,
+                default=Query(1, ge=1, description="Page number (1-indexed)"),
+            ),
+            inspect.Parameter(
+                "items_per_page",
+                inspect.Parameter.KEYWORD_ONLY,
+                annotation=int,
+                default=_page_size_query(default_page_size, max_page_size),
+            ),
+        ]
+        return cls._build_paginate_params(
+            pagination_params=pagination_params,
+            pagination_fixed={"include_total": include_total},
+            dep_name=f"{cls.model.__name__}OffsetPaginateParams",
+            search=search,
+            filter=filter,
+            order=order,
+            search_fields=search_fields,
+            facet_fields=facet_fields,
+            order_fields=order_fields,
+            default_order_field=default_order_field,
+            default_order=default_order,
+        )

     @classmethod
-    def cursor_params(
+    def cursor_paginate_params(
         cls: type[Self],
         *,
         default_page_size: int = 20,
         max_page_size: int = 100,
+        search: bool = True,
+        filter: bool = True,
+        order: bool = True,
+        search_fields: Sequence[SearchFieldType] | None = None,
+        facet_fields: Sequence[FacetFieldType] | None = None,
+        order_fields: Sequence[QueryableAttribute[Any]] | None = None,
+        default_order_field: QueryableAttribute[Any] | None = None,
+        default_order: Literal["asc", "desc"] = "asc",
     ) -> Callable[..., Awaitable[dict[str, Any]]]:
-        """Return a FastAPI dependency that collects cursor pagination params from query params.
+        """Return a FastAPI dependency that collects all params for :meth:`cursor_paginate`.

         Args:
-            default_page_size: Default value for the ``items_per_page`` query parameter.
-            max_page_size: Maximum allowed value for ``items_per_page`` (enforced via
-                ``le`` on the ``Query``).
+            default_page_size: Default ``items_per_page`` value.
+            max_page_size: Maximum ``items_per_page`` value.
+            search: Enable search query parameters.
+            filter: Enable facet filter query parameters.
+            order: Enable order query parameters.
+            search_fields: Override searchable fields.
+            facet_fields: Override facet fields.
+            order_fields: Override order fields.
+            default_order_field: Default field to order by when ``order_by`` is absent.
+            default_order: Default sort direction.

         Returns:
-            An async dependency that resolves to a dict with ``cursor`` and
-            ``items_per_page`` keys, ready to be unpacked into
-            :meth:`cursor_paginate`.
+            An async dependency that resolves to a dict ready to be unpacked
+            into :meth:`cursor_paginate`.
         """
-
-        async def dependency(
-            cursor: str | None = Query(
-                None, description="Cursor token from a previous response"
-            ),
-            items_per_page: int = _page_size_query(default_page_size, max_page_size),
-        ) -> dict[str, Any]:
-            return {"cursor": cursor, "items_per_page": items_per_page}
-
-        dependency.__name__ = f"{cls.model.__name__}CursorParams"
-        return dependency
+        pagination_params = [
+            inspect.Parameter(
+                "cursor",
+                inspect.Parameter.KEYWORD_ONLY,
+                annotation=str | None,
+                default=Query(
+                    None, description="Cursor token from a previous response"
+                ),
+            ),
+            inspect.Parameter(
+                "items_per_page",
+                inspect.Parameter.KEYWORD_ONLY,
+                annotation=int,
+                default=_page_size_query(default_page_size, max_page_size),
+            ),
+        ]
+        return cls._build_paginate_params(
+            pagination_params=pagination_params,
+            pagination_fixed={},
+            dep_name=f"{cls.model.__name__}CursorPaginateParams",
+            search=search,
+            filter=filter,
+            order=order,
+            search_fields=search_fields,
+            facet_fields=facet_fields,
+            order_fields=order_fields,
+            default_order_field=default_order_field,
+            default_order=default_order,
+        )

     @classmethod
     def paginate_params(
@@ -384,102 +552,81 @@ class AsyncCrud(Generic[ModelType]):
         max_page_size: int = 100,
         default_pagination_type: PaginationType = PaginationType.OFFSET,
         include_total: bool = True,
+        search: bool = True,
+        filter: bool = True,
+        order: bool = True,
+        search_fields: Sequence[SearchFieldType] | None = None,
+        facet_fields: Sequence[FacetFieldType] | None = None,
+        order_fields: Sequence[QueryableAttribute[Any]] | None = None,
+        default_order_field: QueryableAttribute[Any] | None = None,
+        default_order: Literal["asc", "desc"] = "asc",
     ) -> Callable[..., Awaitable[dict[str, Any]]]:
-        """Return a FastAPI dependency that collects all pagination params from query params.
+        """Return a FastAPI dependency that collects all params for :meth:`paginate`.

         Args:
-            default_page_size: Default value for the ``items_per_page`` query parameter.
-            max_page_size: Maximum allowed value for ``items_per_page`` (enforced via
-                ``le`` on the ``Query``).
+            default_page_size: Default ``items_per_page`` value.
+            max_page_size: Maximum ``items_per_page`` value.
             default_pagination_type: Default pagination strategy.
-            include_total: Server-side flag forwarded as-is to ``include_total`` in
-                :meth:`paginate`. Not exposed as a query parameter.
+            include_total: Whether to include total count (not a query param).
+            search: Enable search query parameters.
+            filter: Enable facet filter query parameters.
+            order: Enable order query parameters.
+            search_fields: Override searchable fields.
+            facet_fields: Override facet fields.
+            order_fields: Override order fields.
+            default_order_field: Default field to order by when ``order_by`` is absent.
+            default_order: Default sort direction.

         Returns:
-            An async dependency that resolves to a dict with ``pagination_type``,
-            ``page``, ``cursor``, ``items_per_page``, and ``include_total`` keys,
-            ready to be unpacked into :meth:`paginate`.
+            An async dependency that resolves to a dict ready to be unpacked
+            into :meth:`paginate`.
         """
-
-        async def dependency(
-            pagination_type: PaginationType = Query(
-                default_pagination_type, description="Pagination strategy"
-            ),
-            page: int = Query(
-                1, ge=1, description="Page number (1-indexed, offset only)"
-            ),
-            cursor: str | None = Query(
-                None, description="Cursor token from a previous response (cursor only)"
-            ),
-            items_per_page: int = _page_size_query(default_page_size, max_page_size),
-        ) -> dict[str, Any]:
-            return {
-                "pagination_type": pagination_type,
-                "page": page,
-                "cursor": cursor,
-                "items_per_page": items_per_page,
-                "include_total": include_total,
-            }
-
-        dependency.__name__ = f"{cls.model.__name__}PaginateParams"
-        return dependency
-
-    @classmethod
-    def order_params(
-        cls: type[Self],
-        *,
-        order_fields: Sequence[QueryableAttribute[Any]] | None = None,
-        default_field: QueryableAttribute[Any] | None = None,
-        default_order: Literal["asc", "desc"] = "asc",
-    ) -> Callable[..., Awaitable[OrderByClause | None]]:
-        """Return a FastAPI dependency that resolves order query params into an order_by clause.
-
-        Args:
-            order_fields: Override the allowed order fields. Falls back to the class-level
-                ``order_fields`` if not provided.
-            default_field: Field to order by when ``order_by`` query param is absent.
-                If ``None`` and no ``order_by`` is provided, no ordering is applied.
-            default_order: Default order direction when ``order`` is absent
-                (``"asc"`` or ``"desc"``).
-
-        Returns:
-            An async dependency function named ``{Model}OrderParams`` that resolves to an
-            ``OrderByClause`` (or ``None``). Pass it to ``Depends()`` in your route.
-
-        Raises:
-            ValueError: If no order fields are configured on this CRUD class and none are
-                provided via ``order_fields``.
-            InvalidOrderFieldError: When the request provides an unknown ``order_by`` value.
-        """
-        fields = order_fields if order_fields is not None else cls.order_fields
-        if not fields:
-            raise ValueError(
-                f"{cls.__name__} has no order_fields configured. "
-                "Pass order_fields= or set them on CrudFactory."
-            )
-        field_map: dict[str, QueryableAttribute[Any]] = {f.key: f for f in fields}
-        valid_keys = sorted(field_map.keys())
-
-        async def dependency(
-            order_by: str | None = Query(
-                None, description=f"Field to order by. Valid values: {valid_keys}"
-            ),
-            order: Literal["asc", "desc"] = Query(
-                default_order, description="Sort direction"
-            ),
-        ) -> OrderByClause | None:
-            if order_by is None:
-                if default_field is None:
-                    return None
-                field = default_field
-            elif order_by not in field_map:
-                raise InvalidOrderFieldError(order_by, valid_keys)
-            else:
-                field = field_map[order_by]
-            return field.asc() if order == "asc" else field.desc()
-
-        dependency.__name__ = f"{cls.model.__name__}OrderParams"
-        return dependency
+        pagination_params = [
+            inspect.Parameter(
+                "pagination_type",
+                inspect.Parameter.KEYWORD_ONLY,
+                annotation=PaginationType,
+                default=Query(
+                    default_pagination_type, description="Pagination strategy"
+                ),
+            ),
+            inspect.Parameter(
+                "page",
+                inspect.Parameter.KEYWORD_ONLY,
+                annotation=int,
+                default=Query(
+                    1, ge=1, description="Page number (1-indexed, offset only)"
+                ),
+            ),
+            inspect.Parameter(
+                "cursor",
+                inspect.Parameter.KEYWORD_ONLY,
+                annotation=str | None,
+                default=Query(
+                    None,
+                    description="Cursor token from a previous response (cursor only)",
+                ),
+            ),
+            inspect.Parameter(
+                "items_per_page",
+                inspect.Parameter.KEYWORD_ONLY,
+                annotation=int,
+                default=_page_size_query(default_page_size, max_page_size),
+            ),
+        ]
+        return cls._build_paginate_params(
+            pagination_params=pagination_params,
+            pagination_fixed={"include_total": include_total},
+            dep_name=f"{cls.model.__name__}PaginateParams",
+            search=search,
+            filter=filter,
+            order=order,
+            search_fields=search_fields,
+            facet_fields=facet_fields,
+            order_fields=order_fields,
+            default_order_field=default_order_field,
+            default_order=default_order,
+        )

     @overload
     @classmethod
@@ -1056,6 +1203,7 @@ class AsyncCrud(Generic[ModelType]):
         include_total: bool = True,
         search: str | SearchConfig | None = None,
         search_fields: Sequence[SearchFieldType] | None = None,
+        search_column: str | None = None,
         facet_fields: Sequence[FacetFieldType] | None = None,
         filter_by: dict[str, Any] | BaseModel | None = None,
         schema: type[BaseModel],
@@ -1075,6 +1223,7 @@ class AsyncCrud(Generic[ModelType]):
                 ``pagination.total_count`` will be ``None``.
             search: Search query string or SearchConfig object
             search_fields: Fields to search in (overrides class default)
+            search_column: Restrict search to a single column key.
             facet_fields: Columns to compute distinct values for (overrides class default)
             filter_by: Dict of {column_key: value} to filter by declared facet fields.
                 Keys must match the column.key of a facet field. Scalar → equality,
@@ -1097,6 +1246,7 @@ class AsyncCrud(Generic[ModelType]):
             search,
             search_fields=search_fields,
             default_fields=cls.searchable_fields,
+            search_column=search_column,
         )
         filters.extend(search_filters)
         search_joins.extend(new_search_joins)
@@ -1153,6 +1303,7 @@ class AsyncCrud(Generic[ModelType]):
         filter_attributes = await cls._build_filter_attributes(
             session, facet_fields, filters, search_joins
         )
+        search_columns = cls._resolve_search_columns(search_fields)

         return OffsetPaginatedResponse(
             data=items,
@@ -1163,6 +1314,7 @@ class AsyncCrud(Generic[ModelType]):
                 has_more=has_more,
             ),
             filter_attributes=filter_attributes,
+            search_columns=search_columns,
         )

     @classmethod
@@ -1179,6 +1331,7 @@ class AsyncCrud(Generic[ModelType]):
|
|||||||
items_per_page: int = 20,
|
items_per_page: int = 20,
|
||||||
search: str | SearchConfig | None = None,
|
search: str | SearchConfig | None = None,
|
||||||
search_fields: Sequence[SearchFieldType] | None = None,
|
search_fields: Sequence[SearchFieldType] | None = None,
|
||||||
|
search_column: str | None = None,
|
||||||
facet_fields: Sequence[FacetFieldType] | None = None,
|
facet_fields: Sequence[FacetFieldType] | None = None,
|
||||||
filter_by: dict[str, Any] | BaseModel | None = None,
|
filter_by: dict[str, Any] | BaseModel | None = None,
|
||||||
schema: type[BaseModel],
|
schema: type[BaseModel],
|
||||||
@@ -1199,6 +1352,7 @@ class AsyncCrud(Generic[ModelType]):
|
|||||||
items_per_page: Number of items per page (default 20).
|
items_per_page: Number of items per page (default 20).
|
||||||
search: Search query string or SearchConfig object.
|
search: Search query string or SearchConfig object.
|
||||||
search_fields: Fields to search in (overrides class default).
|
search_fields: Fields to search in (overrides class default).
|
||||||
|
search_column: Restrict search to a single column key.
|
||||||
facet_fields: Columns to compute distinct values for (overrides class default).
|
facet_fields: Columns to compute distinct values for (overrides class default).
|
||||||
filter_by: Dict of {column_key: value} to filter by declared facet fields.
|
filter_by: Dict of {column_key: value} to filter by declared facet fields.
|
||||||
Keys must match the column.key of a facet field. Scalar → equality,
|
Keys must match the column.key of a facet field. Scalar → equality,
|
||||||
@@ -1238,6 +1392,7 @@ class AsyncCrud(Generic[ModelType]):
|
|||||||
search,
|
search,
|
||||||
search_fields=search_fields,
|
search_fields=search_fields,
|
||||||
default_fields=cls.searchable_fields,
|
default_fields=cls.searchable_fields,
|
||||||
|
search_column=search_column,
|
||||||
)
|
)
|
||||||
filters.extend(search_filters)
|
filters.extend(search_filters)
|
||||||
search_joins.extend(new_search_joins)
|
search_joins.extend(new_search_joins)
|
||||||
@@ -1308,6 +1463,7 @@ class AsyncCrud(Generic[ModelType]):
|
|||||||
filter_attributes = await cls._build_filter_attributes(
|
filter_attributes = await cls._build_filter_attributes(
|
||||||
session, facet_fields, filters, search_joins
|
session, facet_fields, filters, search_joins
|
||||||
)
|
)
|
||||||
|
search_columns = cls._resolve_search_columns(search_fields)
|
||||||
|
|
||||||
return CursorPaginatedResponse(
|
return CursorPaginatedResponse(
|
||||||
data=items,
|
data=items,
|
||||||
@@ -1318,6 +1474,7 @@ class AsyncCrud(Generic[ModelType]):
|
|||||||
has_more=has_more,
|
has_more=has_more,
|
||||||
),
|
),
|
||||||
filter_attributes=filter_attributes,
|
filter_attributes=filter_attributes,
|
||||||
|
search_columns=search_columns,
|
||||||
)
|
)
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
@@ -1338,6 +1495,7 @@ class AsyncCrud(Generic[ModelType]):
|
|||||||
include_total: bool = ...,
|
include_total: bool = ...,
|
||||||
search: str | SearchConfig | None = ...,
|
search: str | SearchConfig | None = ...,
|
||||||
search_fields: Sequence[SearchFieldType] | None = ...,
|
search_fields: Sequence[SearchFieldType] | None = ...,
|
||||||
|
search_column: str | None = ...,
|
||||||
facet_fields: Sequence[FacetFieldType] | None = ...,
|
facet_fields: Sequence[FacetFieldType] | None = ...,
|
||||||
filter_by: dict[str, Any] | BaseModel | None = ...,
|
filter_by: dict[str, Any] | BaseModel | None = ...,
|
||||||
schema: type[BaseModel],
|
schema: type[BaseModel],
|
||||||
@@ -1361,6 +1519,7 @@ class AsyncCrud(Generic[ModelType]):
|
|||||||
include_total: bool = ...,
|
include_total: bool = ...,
|
||||||
search: str | SearchConfig | None = ...,
|
search: str | SearchConfig | None = ...,
|
||||||
search_fields: Sequence[SearchFieldType] | None = ...,
|
search_fields: Sequence[SearchFieldType] | None = ...,
|
||||||
|
search_column: str | None = ...,
|
||||||
facet_fields: Sequence[FacetFieldType] | None = ...,
|
facet_fields: Sequence[FacetFieldType] | None = ...,
|
||||||
filter_by: dict[str, Any] | BaseModel | None = ...,
|
filter_by: dict[str, Any] | BaseModel | None = ...,
|
||||||
schema: type[BaseModel],
|
schema: type[BaseModel],
|
||||||
@@ -1383,6 +1542,7 @@ class AsyncCrud(Generic[ModelType]):
|
|||||||
include_total: bool = True,
|
include_total: bool = True,
|
||||||
search: str | SearchConfig | None = None,
|
search: str | SearchConfig | None = None,
|
||||||
search_fields: Sequence[SearchFieldType] | None = None,
|
search_fields: Sequence[SearchFieldType] | None = None,
|
||||||
|
search_column: str | None = None,
|
||||||
facet_fields: Sequence[FacetFieldType] | None = None,
|
facet_fields: Sequence[FacetFieldType] | None = None,
|
||||||
filter_by: dict[str, Any] | BaseModel | None = None,
|
filter_by: dict[str, Any] | BaseModel | None = None,
|
||||||
schema: type[BaseModel],
|
schema: type[BaseModel],
|
||||||
@@ -1410,6 +1570,7 @@ class AsyncCrud(Generic[ModelType]):
|
|||||||
only applies when ``pagination_type`` is ``OFFSET``.
|
only applies when ``pagination_type`` is ``OFFSET``.
|
||||||
search: Search query string or :class:`.SearchConfig` object.
|
search: Search query string or :class:`.SearchConfig` object.
|
||||||
search_fields: Fields to search in (overrides class default).
|
search_fields: Fields to search in (overrides class default).
|
||||||
|
search_column: Restrict search to a single column key.
|
||||||
facet_fields: Columns to compute distinct values for (overrides
|
facet_fields: Columns to compute distinct values for (overrides
|
||||||
class default).
|
class default).
|
||||||
filter_by: Dict of ``{column_key: value}`` to filter by declared
|
filter_by: Dict of ``{column_key: value}`` to filter by declared
|
||||||
@@ -1438,6 +1599,7 @@ class AsyncCrud(Generic[ModelType]):
|
|||||||
items_per_page=items_per_page,
|
items_per_page=items_per_page,
|
||||||
search=search,
|
search=search,
|
||||||
search_fields=search_fields,
|
search_fields=search_fields,
|
||||||
|
search_column=search_column,
|
||||||
facet_fields=facet_fields,
|
facet_fields=facet_fields,
|
||||||
filter_by=filter_by,
|
filter_by=filter_by,
|
||||||
schema=schema,
|
schema=schema,
|
||||||
@@ -1457,6 +1619,7 @@ class AsyncCrud(Generic[ModelType]):
|
|||||||
include_total=include_total,
|
include_total=include_total,
|
||||||
search=search,
|
search=search,
|
||||||
search_fields=search_fields,
|
search_fields=search_fields,
|
||||||
|
search_column=search_column,
|
||||||
facet_fields=facet_fields,
|
facet_fields=facet_fields,
|
||||||
filter_by=filter_by,
|
filter_by=filter_by,
|
||||||
schema=schema,
|
schema=schema,
|
||||||
@@ -1488,7 +1651,7 @@ def CrudFactory(
|
|||||||
responses. Supports direct columns (``User.status``) and relationship tuples
|
responses. Supports direct columns (``User.status``) and relationship tuples
|
||||||
(``(User.role, Role.name)``). Can be overridden per call.
|
(``(User.role, Role.name)``). Can be overridden per call.
|
||||||
order_fields: Optional list of model attributes that callers are allowed to order by
|
order_fields: Optional list of model attributes that callers are allowed to order by
|
||||||
via ``order_params()``. Can be overridden per call.
|
via ``offset_paginate_params()``. Can be overridden per call.
|
||||||
m2m_fields: Optional mapping for many-to-many relationships.
|
m2m_fields: Optional mapping for many-to-many relationships.
|
||||||
Maps schema field names (containing lists of IDs) to
|
Maps schema field names (containing lists of IDs) to
|
||||||
SQLAlchemy relationship attributes.
|
SQLAlchemy relationship attributes.
|
||||||
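For illustration, a minimal sketch of calling the new `search_column` knob from application code. `UserCrud`, `UserSchema`, and the `session` wiring are assumptions for this sketch, not part of the diff:

```python
from pydantic import BaseModel


class UserSchema(BaseModel):
    id: int
    name: str
    email: str


async def list_users(session, q: str | None, column: str | None):
    # search_column narrows matching to one searchable field; an unknown
    # key raises InvalidSearchColumnError (rendered as HTTP 400).
    return await UserCrud.offset_paginate(  # UserCrud: assumed CrudFactory class
        session,
        search=q,
        search_column=column,  # e.g. "email"
        schema=UserSchema,
    )
```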
@@ -2,17 +2,32 @@

 import asyncio
 import functools
-from collections import Counter
 from collections.abc import Sequence
 from dataclasses import dataclass, replace
 from typing import TYPE_CHECKING, Any, Literal

-from sqlalchemy import String, and_, or_, select
+from sqlalchemy import String, and_, func, or_, select
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy.orm import DeclarativeBase
 from sqlalchemy.orm.attributes import InstrumentedAttribute
+from sqlalchemy.types import (
+    ARRAY,
+    Boolean,
+    Date,
+    DateTime,
+    Enum,
+    Integer,
+    Numeric,
+    Time,
+    Uuid,
+)

-from ..exceptions import InvalidFacetFilterError, NoSearchableFieldsError
+from ..exceptions import (
+    InvalidFacetFilterError,
+    InvalidSearchColumnError,
+    NoSearchableFieldsError,
+    UnsupportedFacetTypeError,
+)
 from ..types import FacetFieldType, SearchFieldType

 if TYPE_CHECKING:
@@ -82,6 +97,7 @@ def build_search_filters(
     search: str | SearchConfig,
     search_fields: Sequence[SearchFieldType] | None = None,
     default_fields: Sequence[SearchFieldType] | None = None,
+    search_column: str | None = None,
 ) -> tuple[list["ColumnElement[bool]"], list[InstrumentedAttribute[Any]]]:
     """Build SQLAlchemy filter conditions for search.

@@ -90,6 +106,8 @@ def build_search_filters(
         search: Search string or SearchConfig
         search_fields: Fields specified per-call (takes priority)
         default_fields: Default fields (from ClassVar)
+        search_column: Optional key to narrow search to a single field.
+            Must match one of the resolved search field keys.

     Returns:
         Tuple of (filter_conditions, joins_needed)
@@ -116,6 +134,14 @@ def build_search_filters(
     if not fields:
         raise NoSearchableFieldsError(model)

+    # Narrow to a single column when search_column is specified
+    if search_column is not None:
+        keys = search_field_keys(fields)
+        index = {k: f for k, f in zip(keys, fields)}
+        if search_column not in index:
+            raise InvalidSearchColumnError(search_column, sorted(index))
+        fields = [index[search_column]]
+
     query = config.query.strip()
     filters: list[ColumnElement[bool]] = []
     joins: list[InstrumentedAttribute[Any]] = []
@@ -150,8 +176,13 @@ def build_search_filters(
     return filters, joins


+def search_field_keys(fields: Sequence[SearchFieldType]) -> list[str]:
+    """Return a human-readable key for each search field."""
+    return facet_keys(fields)
+
+
 def facet_keys(facet_fields: Sequence[FacetFieldType]) -> list[str]:
-    """Return a key for each facet field, disambiguating duplicate column keys.
+    """Return a key for each facet field.

     Args:
         facet_fields: Sequence of facet fields — either direct columns or
@@ -160,22 +191,12 @@ def facet_keys(facet_fields: Sequence[FacetFieldType]) -> list[str]:
     Returns:
         A list of string keys, one per facet field, in the same order.
     """
-    raw: list[tuple[str, str | None]] = []
+    keys: list[str] = []
     for field in facet_fields:
         if isinstance(field, tuple):
-            rel = field[-2]
-            column = field[-1]
-            raw.append((column.key, rel.key))
+            keys.append("__".join(el.key for el in field))
         else:
-            raw.append((field.key, None))
-
-    counts = Counter(col_key for col_key, _ in raw)
-    keys: list[str] = []
-    for col_key, rel_key in raw:
-        if counts[col_key] > 1 and rel_key is not None:
-            keys.append(f"{rel_key}__{col_key}")
-        else:
-            keys.append(col_key)
+            keys.append(field.key)
     return keys


@@ -212,6 +233,13 @@ async def build_facets(
             rels = ()
             column = field

+        col_type = column.property.columns[0].type
+        is_array = isinstance(col_type, ARRAY)
+
+        if is_array:
+            unnested = func.unnest(column).label(column.key)
+            q = select(unnested).select_from(model).distinct()
+        else:
             q = select(column).select_from(model).distinct()

        # Apply base joins (already done on main query, but needed here independently)
@@ -226,6 +254,9 @@ async def build_facets(
         if base_filters:
             q = q.where(and_(*base_filters))

+        if is_array:
+            q = q.order_by(unnested)
+        else:
             q = q.order_by(column)
         result = await session.execute(q)
         values = [row[0] for row in result.all() if row[0] is not None]
@@ -237,6 +268,10 @@ async def build_facets(
     return dict(pairs)


+_EQUALITY_TYPES = (String, Integer, Numeric, Date, DateTime, Time, Enum, Uuid)
+"""Column types that support equality / IN filtering in build_filter_by."""
+
+
 def build_filter_by(
     filter_by: dict[str, Any],
     facet_fields: Sequence[FacetFieldType],
@@ -282,9 +317,23 @@ def build_filter_by(
                 joins.append(rel)
                 added_join_keys.add(rel_key)

+        col_type = column.property.columns[0].type
+        if isinstance(col_type, ARRAY):
+            if isinstance(value, list):
+                filters.append(column.overlap(value))
+            else:
+                filters.append(column.any(value))
+        elif isinstance(col_type, Boolean):
+            if isinstance(value, list):
+                filters.append(column.in_(value))
+            else:
+                filters.append(column.is_(value))
+        elif isinstance(col_type, _EQUALITY_TYPES):
             if isinstance(value, list):
                 filters.append(column.in_(value))
             else:
                 filters.append(column == value)
+        else:
+            raise UnsupportedFacetTypeError(key, type(col_type).__name__)

     return filters, joins
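The new type dispatch above maps each facet value onto a different SQLAlchemy operator. A standalone sketch of the equivalent expressions — `Post` and its columns are illustrative, not part of this diff (note that `.overlap()` requires the PostgreSQL ARRAY type):

```python
from sqlalchemy import Boolean, Column, Integer, String
from sqlalchemy.dialects.postgresql import ARRAY
from sqlalchemy.orm import DeclarativeBase


class Base(DeclarativeBase):
    pass


class Post(Base):
    __tablename__ = "post"
    id = Column(Integer, primary_key=True)
    title = Column(String)
    active = Column(Boolean)
    tags = Column(ARRAY(String))


# ARRAY column: list value → overlap (any shared element), scalar → any()
f1 = Post.tags.overlap(["python", "sql"])
f2 = Post.tags.any("python")
# Boolean column: scalar uses IS, which is NULL-safe
f3 = Post.active.is_(True)
# Equality types (String, Integer, ...): scalar → ==, list → IN
f4 = Post.title == "hello"
f5 = Post.title.in_(["a", "b"])
```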
@@ -24,9 +24,12 @@ __all__ = [
 ]


+_SessionT = TypeVar("_SessionT", bound=AsyncSession)
+
+
 def create_db_dependency(
-    session_maker: async_sessionmaker[AsyncSession],
-) -> Callable[[], AsyncGenerator[AsyncSession, None]]:
+    session_maker: async_sessionmaker[_SessionT],
+) -> Callable[[], AsyncGenerator[_SessionT, None]]:
     """Create a FastAPI dependency for database sessions.

     Creates a dependency function that yields a session and auto-commits
@@ -54,7 +57,7 @@ def create_db_dependency(
     ```
     """

-    async def get_db() -> AsyncGenerator[AsyncSession, None]:
+    async def get_db() -> AsyncGenerator[_SessionT, None]:
         async with session_maker() as session:
             await session.connection()
             yield session
@@ -65,8 +68,8 @@ def create_db_dependency(


 def create_db_context(
-    session_maker: async_sessionmaker[AsyncSession],
-) -> Callable[[], AbstractAsyncContextManager[AsyncSession]]:
+    session_maker: async_sessionmaker[_SessionT],
+) -> Callable[[], AbstractAsyncContextManager[_SessionT]]:
     """Create a context manager for database sessions.

     Creates a context manager for use outside of FastAPI request handlers,
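The `_SessionT` TypeVar means a custom session subclass now flows through the factory's return type instead of being widened to plain `AsyncSession`. A minimal sketch, assuming `create_db_dependency` is imported from this package and using a hypothetical subclass:

```python
from sqlalchemy.ext.asyncio import (
    AsyncSession,
    async_sessionmaker,
    create_async_engine,
)


class MySession(AsyncSession):  # stand-in for any AsyncSession subclass
    pass


engine = create_async_engine("postgresql+asyncpg://localhost/db")  # URL illustrative
maker = async_sessionmaker(engine, class_=MySession, expire_on_commit=False)

# get_db is now typed as yielding MySession, so type checkers see the
# subclass's extra attributes instead of plain AsyncSession.
get_db = create_db_dependency(maker)  # assumed import from this package
```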
@@ -7,9 +7,11 @@ from .exceptions import (
     ForbiddenError,
     InvalidFacetFilterError,
     InvalidOrderFieldError,
+    InvalidSearchColumnError,
     NoSearchableFieldsError,
     NotFoundError,
     UnauthorizedError,
+    UnsupportedFacetTypeError,
     generate_error_responses,
 )
 from .handler import init_exceptions_handlers
@@ -23,7 +25,9 @@ __all__ = [
     "init_exceptions_handlers",
     "InvalidFacetFilterError",
     "InvalidOrderFieldError",
+    "InvalidSearchColumnError",
     "NoSearchableFieldsError",
     "NotFoundError",
     "UnauthorizedError",
+    "UnsupportedFacetTypeError",
 ]

@@ -144,6 +144,61 @@ class InvalidFacetFilterError(ApiException):
         )


+class UnsupportedFacetTypeError(ApiException):
+    """Raised when a facet field has a column type not supported by filter_by."""
+
+    api_error = ApiError(
+        code=400,
+        msg="Unsupported Facet Type",
+        desc="The column type is not supported for facet filtering.",
+        err_code="FACET-TYPE-400",
+    )
+
+    def __init__(self, key: str, col_type: str) -> None:
+        """Initialize the exception.
+
+        Args:
+            key: The facet field key.
+            col_type: The unsupported column type name.
+        """
+        self.key = key
+        self.col_type = col_type
+        super().__init__(
+            desc=(
+                f"Facet field '{key}' has unsupported column type '{col_type}'. "
+                f"Supported types: String, Integer, Numeric, Boolean, "
+                f"Date, DateTime, Time, Enum, Uuid, ARRAY."
+            )
+        )
+
+
+class InvalidSearchColumnError(ApiException):
+    """Raised when search_column is not one of the configured searchable fields."""
+
+    api_error = ApiError(
+        code=400,
+        msg="Invalid Search Column",
+        desc="The requested search column is not a configured searchable field.",
+        err_code="SEARCH-COL-400",
+    )
+
+    def __init__(self, column: str, valid_columns: list[str]) -> None:
+        """Initialize the exception.
+
+        Args:
+            column: The unknown search column provided by the caller.
+            valid_columns: List of valid search column keys.
+        """
+        self.column = column
+        self.valid_columns = valid_columns
+        super().__init__(
+            desc=(
+                f"'{column}' is not a searchable column. "
+                f"Valid columns: {valid_columns}."
+            )
+        )
+
+
 class InvalidOrderFieldError(ApiException):
     """Raised when order_by contains a field not in the allowed order fields."""

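Both new exceptions keep the offending input on the instance, which the class definitions above make easy to verify. A short sketch, assuming imports from this package's exceptions module:

```python
# Assumed: from <package>.exceptions import (
#     InvalidSearchColumnError, UnsupportedFacetTypeError)
err = InvalidSearchColumnError("emial", ["email", "name"])
print(err.column)         # "emial"
print(err.valid_columns)  # ["email", "name"]

err2 = UnsupportedFacetTypeError("payload", "JSON")
print(err2.key, err2.col_type)  # payload JSON
```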
@@ -2,6 +2,7 @@

 from collections.abc import Callable, Sequence
 from dataclasses import dataclass, field
+from enum import Enum
 from typing import Any, cast

 from sqlalchemy.orm import DeclarativeBase
@@ -12,6 +13,13 @@ from .enum import Context
 logger = get_logger()


+def _normalize_contexts(
+    contexts: list[str | Enum] | tuple[str | Enum, ...],
+) -> list[str]:
+    """Convert a sequence of any Enum subclass and/or plain strings to a list of strings."""
+    return [c.value if isinstance(c, Enum) else c for c in contexts]
+
+
 @dataclass
 class Fixture:
     """A fixture definition with metadata."""
@@ -50,17 +58,42 @@ class FixtureRegistry:
             Post(id=1, title="Test", user_id=1),
         ]
     ```
+
+    Fixtures with the same name may be registered for **different** contexts.
+    When multiple contexts are loaded together, their instances are merged:
+
+    ```python
+    @fixtures.register(contexts=[Context.BASE])
+    def users():
+        return [User(id=1, username="admin")]
+
+    @fixtures.register(contexts=[Context.TESTING])
+    def users():
+        return [User(id=2, username="tester")]
+
+    # load_fixtures_by_context(..., Context.BASE, Context.TESTING)
+    # → loads both User(admin) and User(tester) under the "users" name
+    ```
     """

     def __init__(
         self,
-        contexts: list[str | Context] | None = None,
+        contexts: list[str | Enum] | None = None,
     ) -> None:
-        self._fixtures: dict[str, Fixture] = {}
+        self._fixtures: dict[str, list[Fixture]] = {}
         self._default_contexts: list[str] | None = (
-            [c.value if isinstance(c, Context) else c for c in contexts]
-            if contexts
-            else None
+            _normalize_contexts(contexts) if contexts else None
+        )
+
+    def _validate_no_context_overlap(self, name: str, new_contexts: list[str]) -> None:
+        """Raise ``ValueError`` if any existing variant for *name* overlaps."""
+        existing_variants = self._fixtures.get(name, [])
+        new_set = set(new_contexts)
+        for variant in existing_variants:
+            if set(variant.contexts) & new_set:
+                raise ValueError(
+                    f"Fixture '{name}' already exists in the current registry "
+                    f"with overlapping contexts. Use distinct context sets for "
+                    f"each variant of the same fixture name."
                 )

     def register(
@@ -69,7 +102,7 @@ class FixtureRegistry:
         *,
         name: str | None = None,
         depends_on: list[str] | None = None,
-        contexts: list[str | Context] | None = None,
+        contexts: list[str | Enum] | None = None,
     ) -> Callable[..., Any]:
         """Register a fixture function.

@@ -79,7 +112,8 @@ class FixtureRegistry:
             func: Fixture function returning list of model instances
             name: Fixture name (defaults to function name)
             depends_on: List of fixture names this depends on
-            contexts: List of contexts this fixture belongs to
+            contexts: List of contexts this fixture belongs to. Both
+                :class:`Context` enum values and plain strings are accepted.

         Example:
             ```python
@@ -90,7 +124,6 @@ class FixtureRegistry:
             @fixtures.register(depends_on=["roles"], contexts=[Context.TESTING])
             def test_users():
                 return [User(id=1, username="test", role_id=1)]
-            ```
         """

         def decorator(
@@ -98,20 +131,21 @@ class FixtureRegistry:
         ) -> Callable[[], Sequence[DeclarativeBase]]:
             fixture_name = name or cast(Any, fn).__name__
             if contexts is not None:
-                fixture_contexts = [
-                    c.value if isinstance(c, Context) else c for c in contexts
-                ]
+                fixture_contexts = _normalize_contexts(contexts)
             elif self._default_contexts is not None:
                 fixture_contexts = self._default_contexts
             else:
                 fixture_contexts = [Context.BASE.value]

-            self._fixtures[fixture_name] = Fixture(
+            self._validate_no_context_overlap(fixture_name, fixture_contexts)
+            self._fixtures.setdefault(fixture_name, []).append(
+                Fixture(
                     name=fixture_name,
                     func=fn,
                     depends_on=depends_on or [],
                     contexts=fixture_contexts,
                 )
+            )
             return fn

         if func is not None:
@@ -121,11 +155,14 @@ class FixtureRegistry:
     def include_registry(self, registry: "FixtureRegistry") -> None:
         """Include another `FixtureRegistry` in the same current `FixtureRegistry`.

+        Fixtures with the same name are allowed as long as their context sets
+        do not overlap. Conflicting contexts raise :class:`ValueError`.
+
         Args:
             registry: The `FixtureRegistry` to include

         Raises:
-            ValueError: If a fixture name already exists in the current registry
+            ValueError: If a fixture name already exists with overlapping contexts

         Example:
             ```python
@@ -139,31 +176,73 @@ class FixtureRegistry:
             registry.include_registry(registry=dev_registry)
             ```
         """
-        for name, fixture in registry._fixtures.items():
-            if name in self._fixtures:
-                raise ValueError(
-                    f"Fixture '{name}' already exists in the current registry"
-                )
-            self._fixtures[name] = fixture
+        for name, variants in registry._fixtures.items():
+            for fixture in variants:
+                self._validate_no_context_overlap(name, fixture.contexts)
+                self._fixtures.setdefault(name, []).append(fixture)

     def get(self, name: str) -> Fixture:
-        """Get a fixture by name."""
+        """Get a fixture by name.
+
+        Raises:
+            KeyError: If no fixture with *name* is registered.
+            ValueError: If the fixture has multiple context variants — use
+                :meth:`get_variants` in that case.
+        """
         if name not in self._fixtures:
             raise KeyError(f"Fixture '{name}' not found")
-        return self._fixtures[name]
+        variants = self._fixtures[name]
+        if len(variants) > 1:
+            raise ValueError(
+                f"Fixture '{name}' has {len(variants)} context variants. "
+                f"Use get_variants('{name}') to retrieve them."
+            )
+        return variants[0]
+
+    def get_variants(self, name: str, *contexts: str | Enum) -> list[Fixture]:
+        """Return all registered variants for *name*, optionally filtered by context.
+
+        Args:
+            name: Fixture name.
+            *contexts: If given, only return variants whose context set
+                intersects with these values. Both :class:`Context` enum
+                values and plain strings are accepted.
+
+        Returns:
+            List of matching :class:`Fixture` objects (may be empty when a
+            context filter is applied and nothing matches).
+
+        Raises:
+            KeyError: If no fixture with *name* is registered.
+        """
+        if name not in self._fixtures:
+            raise KeyError(f"Fixture '{name}' not found")
+        variants = self._fixtures[name]
+        if not contexts:
+            return list(variants)
+        context_values = set(_normalize_contexts(contexts))
+        return [v for v in variants if set(v.contexts) & context_values]

     def get_all(self) -> list[Fixture]:
-        """Get all registered fixtures."""
-        return list(self._fixtures.values())
+        """Get all registered fixtures (all variants of all names)."""
+        return [f for variants in self._fixtures.values() for f in variants]

-    def get_by_context(self, *contexts: str | Context) -> list[Fixture]:
+    def get_by_context(self, *contexts: str | Enum) -> list[Fixture]:
         """Get fixtures for specific contexts."""
-        context_values = {c.value if isinstance(c, Context) else c for c in contexts}
-        return [f for f in self._fixtures.values() if set(f.contexts) & context_values]
+        context_values = set(_normalize_contexts(contexts))
+        return [
+            f
+            for variants in self._fixtures.values()
+            for f in variants
+            if set(f.contexts) & context_values
+        ]

     def resolve_dependencies(self, *names: str) -> list[str]:
         """Resolve fixture dependencies in topological order.

+        When a fixture name has multiple context variants, the union of all
+        variants' ``depends_on`` lists is used.
+
         Args:
             *names: Fixture names to resolve

@@ -185,9 +264,20 @@ class FixtureRegistry:
             raise ValueError(f"Circular dependency detected: {name}")

         visiting.add(name)
-        fixture = self.get(name)
+        variants = self._fixtures.get(name)
+        if variants is None:
+            raise KeyError(f"Fixture '{name}' not found")

-        for dep in fixture.depends_on:
+        # Union of depends_on across all variants, preserving first-seen order.
+        seen_deps: set[str] = set()
+        all_deps: list[str] = []
+        for variant in variants:
+            for dep in variant.depends_on:
+                if dep not in seen_deps:
+                    all_deps.append(dep)
+                    seen_deps.add(dep)
+
+        for dep in all_deps:
             visit(dep)

         visiting.remove(name)
@@ -199,7 +289,7 @@ class FixtureRegistry:

         return resolved

-    def resolve_context_dependencies(self, *contexts: str | Context) -> list[str]:
+    def resolve_context_dependencies(self, *contexts: str | Enum) -> list[str]:
         """Resolve all fixtures for contexts with dependencies.

         Args:
@@ -209,7 +299,9 @@ class FixtureRegistry:
             List of fixture names in load order
         """
         context_fixtures = self.get_by_context(*contexts)
-        names = [f.name for f in context_fixtures]
+        # Deduplicate names while preserving first-seen order (a name can
+        # appear multiple times if it has variants in different contexts).
+        names = list(dict.fromkeys(f.name for f in context_fixtures))

         all_deps: set[str] = set()
         for name in names:
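Putting the variant API together — a sketch of registering the same fixture name under disjoint contexts and retrieving it. `FixtureRegistry` is assumed imported from this package; `User` is a stand-in (real fixtures would return SQLAlchemy instances), and the plain-string contexts assume they match the enum values:

```python
from dataclasses import dataclass


@dataclass
class User:  # illustrative stand-in model
    id: int
    username: str


registry = FixtureRegistry()  # assumed import from the fixtures module


@registry.register(contexts=["base"])
def users():
    return [User(id=1, username="admin")]


@registry.register(contexts=["testing"])
def users():  # same name, disjoint contexts → allowed
    return [User(id=2, username="tester")]


registry.get_variants("users")             # both variants
registry.get_variants("users", "testing")  # only the testing variant
registry.get("users")                      # raises ValueError: 2 variants
```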
@@ -1,8 +1,11 @@
 """Fixture loading utilities for database seeding."""

 from collections.abc import Callable, Sequence
+from enum import Enum
 from typing import Any

+from sqlalchemy import inspect as sa_inspect
+from sqlalchemy.dialects.postgresql import insert as pg_insert
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy.orm import DeclarativeBase

@@ -10,23 +13,177 @@ from ..db import get_transaction
 from ..logger import get_logger
 from ..types import ModelType
 from .enum import LoadStrategy
-from .registry import Context, FixtureRegistry
+from .registry import FixtureRegistry, _normalize_contexts

 logger = get_logger()


+def _instance_to_dict(instance: DeclarativeBase) -> dict[str, Any]:
+    """Extract column values from a model instance, skipping unset server-default columns."""
+    state = sa_inspect(instance)
+    state_dict = state.dict
+    result: dict[str, Any] = {}
+    for prop in state.mapper.column_attrs:
+        if prop.key not in state_dict:
+            continue
+        val = state_dict[prop.key]
+        if val is None:
+            col = prop.columns[0]
+
+            if (
+                col.server_default is not None
+                or (col.default is not None and col.default.is_callable)
+                or col.autoincrement is True
+            ):
+                continue
+        result[prop.key] = val
+    return result
+
+
+def _group_by_type(
+    instances: list[DeclarativeBase],
+) -> list[tuple[type[DeclarativeBase], list[DeclarativeBase]]]:
+    """Group instances by their concrete model class, preserving insertion order."""
+    groups: dict[type[DeclarativeBase], list[DeclarativeBase]] = {}
+    for instance in instances:
+        groups.setdefault(type(instance), []).append(instance)
+    return list(groups.items())
+
+
+def _group_by_column_set(
+    dicts: list[dict[str, Any]],
+    instances: list[DeclarativeBase],
+) -> list[tuple[list[dict[str, Any]], list[DeclarativeBase]]]:
+    """Group (dict, instance) pairs by their dict key sets."""
+    groups: dict[
+        frozenset[str], tuple[list[dict[str, Any]], list[DeclarativeBase]]
+    ] = {}
+    for d, inst in zip(dicts, instances):
+        key = frozenset(d)
+        if key not in groups:
+            groups[key] = ([], [])
+        groups[key][0].append(d)
+        groups[key][1].append(inst)
+    return list(groups.values())
+
+
+async def _batch_insert(
+    session: AsyncSession,
+    model_cls: type[DeclarativeBase],
+    instances: list[DeclarativeBase],
+) -> None:
+    """INSERT all instances — raises on conflict (no duplicate handling)."""
+    dicts = [_instance_to_dict(i) for i in instances]
+    for group_dicts, _ in _group_by_column_set(dicts, instances):
+        await session.execute(pg_insert(model_cls).values(group_dicts))
+
+
+async def _batch_merge(
+    session: AsyncSession,
+    model_cls: type[DeclarativeBase],
+    instances: list[DeclarativeBase],
+) -> None:
+    """UPSERT: insert new rows, update existing ones with the provided values."""
+    mapper = model_cls.__mapper__
+    pk_names = [col.name for col in mapper.primary_key]
+    pk_names_set = set(pk_names)
+    non_pk_cols = [
+        prop.key
+        for prop in mapper.column_attrs
+        if not any(col.name in pk_names_set for col in prop.columns)
+    ]
+
+    dicts = [_instance_to_dict(i) for i in instances]
+    for group_dicts, _ in _group_by_column_set(dicts, instances):
+        stmt = pg_insert(model_cls).values(group_dicts)
+
+        inserted_keys = set(group_dicts[0])
+        update_cols = [col for col in non_pk_cols if col in inserted_keys]
+
+        if update_cols:
+            stmt = stmt.on_conflict_do_update(
+                index_elements=pk_names,
+                set_={col: stmt.excluded[col] for col in update_cols},
+            )
+        else:
+            stmt = stmt.on_conflict_do_nothing(index_elements=pk_names)
+
+        await session.execute(stmt)
+
+
+async def _batch_skip_existing(
+    session: AsyncSession,
+    model_cls: type[DeclarativeBase],
+    instances: list[DeclarativeBase],
+) -> list[DeclarativeBase]:
+    """INSERT only rows that do not already exist; return the inserted ones."""
+    mapper = model_cls.__mapper__
+    pk_names = [col.name for col in mapper.primary_key]
+
+    no_pk: list[DeclarativeBase] = []
+    with_pk_pairs: list[tuple[DeclarativeBase, Any]] = []
+    for inst in instances:
+        pk = _get_primary_key(inst)
+        if pk is None:
+            no_pk.append(inst)
+        else:
+            with_pk_pairs.append((inst, pk))
+
+    loaded: list[DeclarativeBase] = list(no_pk)
+    if no_pk:
+        no_pk_dicts = [_instance_to_dict(i) for i in no_pk]
+        for group_dicts, _ in _group_by_column_set(no_pk_dicts, no_pk):
+            await session.execute(pg_insert(model_cls).values(group_dicts))
+
+    if with_pk_pairs:
+        with_pk = [i for i, _ in with_pk_pairs]
+        with_pk_dicts = [_instance_to_dict(i) for i in with_pk]
+        for group_dicts, group_insts in _group_by_column_set(with_pk_dicts, with_pk):
+            stmt = (
+                pg_insert(model_cls)
+                .values(group_dicts)
+                .on_conflict_do_nothing(index_elements=pk_names)
+            )
+            result = await session.execute(stmt.returning(*mapper.primary_key))
+            inserted_pks = {
+                row[0] if len(pk_names) == 1 else tuple(row) for row in result
+            }
+            loaded.extend(
+                inst
+                for inst, pk in zip(
+                    group_insts, [_get_primary_key(i) for i in group_insts]
+                )
+                if pk in inserted_pks
+            )
+
+    return loaded
+
+
 async def _load_ordered(
     session: AsyncSession,
     registry: FixtureRegistry,
     ordered_names: list[str],
     strategy: LoadStrategy,
+    contexts: tuple[str, ...] | None = None,
 ) -> dict[str, list[DeclarativeBase]]:
-    """Load fixtures in order."""
+    """Load fixtures in order using batch Core INSERT statements."""
     results: dict[str, list[DeclarativeBase]] = {}

     for name in ordered_names:
-        fixture = registry.get(name)
-        instances = list(fixture.func())
+        variants = (
+            registry.get_variants(name, *contexts)
+            if contexts is not None
+            else registry.get_variants(name)
+        )
+
+        if contexts is not None and not variants:
+            variants = registry.get_variants(name)
+
+        if not variants:
+            results[name] = []
+            continue
+
+        instances = [inst for v in variants for inst in v.func()]
+
         if not instances:
             results[name] = []
@@ -36,25 +193,17 @@ async def _load_ordered(
         loaded: list[DeclarativeBase] = []

         async with get_transaction(session):
-            for instance in instances:
-                if strategy == LoadStrategy.INSERT:
-                    session.add(instance)
-                    loaded.append(instance)
-                elif strategy == LoadStrategy.MERGE:
-                    merged = await session.merge(instance)
-                    loaded.append(merged)
-                else:  # LoadStrategy.SKIP_EXISTING
-                    pk = _get_primary_key(instance)
-                    if pk is not None:
-                        existing = await session.get(type(instance), pk)
-                        if existing is None:
-                            session.add(instance)
-                            loaded.append(instance)
-                    else:
-                        session.add(instance)
-                        loaded.append(instance)
+            for model_cls, group in _group_by_type(instances):
+                match strategy:
+                    case LoadStrategy.INSERT:
+                        await _batch_insert(session, model_cls, group)
+                        loaded.extend(group)
+                    case LoadStrategy.MERGE:
+                        await _batch_merge(session, model_cls, group)
+                        loaded.extend(group)
+                    case LoadStrategy.SKIP_EXISTING:
+                        inserted = await _batch_skip_existing(session, model_cls, group)
+                        loaded.extend(inserted)

         results[name] = loaded
         logger.info(f"Loaded fixture '{name}': {len(loaded)} {model_name}(s)")
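The MERGE strategy now compiles to a single PostgreSQL `INSERT ... ON CONFLICT DO UPDATE` per column-set group instead of one `session.merge()` round-trip per instance. What `_batch_merge` emits, reduced to a standalone statement — `User` and the session are illustrative:

```python
from sqlalchemy.dialects.postgresql import insert as pg_insert

rows = [{"id": 1, "username": "admin"}, {"id": 2, "username": "tester"}]
stmt = pg_insert(User).values(rows)  # User: assumed mapped model
stmt = stmt.on_conflict_do_update(
    index_elements=["id"],  # primary-key columns
    set_={"username": stmt.excluded["username"]},  # update non-PK columns
)
# await session.execute(stmt)  # one round-trip for the whole batch
```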
@@ -109,6 +258,8 @@ async def load_fixtures(
 ) -> dict[str, list[DeclarativeBase]]:
     """Load specific fixtures by name with dependencies.

+    All context variants of each requested fixture are loaded and merged.
+
     Args:
         session: Database session
         registry: Fixture registry
@@ -125,7 +276,7 @@ async def load_fixtures(
 async def load_fixtures_by_context(
     session: AsyncSession,
     registry: FixtureRegistry,
-    *contexts: str | Context,
+    *contexts: str | Enum,
     strategy: LoadStrategy = LoadStrategy.MERGE,
 ) -> dict[str, list[DeclarativeBase]]:
     """Load all fixtures for specific contexts.
@@ -133,11 +284,15 @@ async def load_fixtures_by_context(
     Args:
         session: Database session
         registry: Fixture registry
-        *contexts: Contexts to load (e.g., Context.BASE, Context.TESTING)
+        *contexts: Contexts to load (e.g., ``Context.BASE``, ``Context.TESTING``,
+            or plain strings for custom contexts)
         strategy: How to handle existing records

     Returns:
         Dict mapping fixture names to loaded instances
     """
+    context_strings = tuple(_normalize_contexts(contexts))
     ordered = registry.resolve_context_dependencies(*contexts)
-    return await _load_ordered(session, registry, ordered, strategy)
+    return await _load_ordered(
+        session, registry, ordered, strategy, contexts=context_strings
+    )
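Caller-side, enum members and plain strings can now be mixed freely since both are normalized. A sketch, assuming `load_fixtures_by_context`, `Context`, `LoadStrategy`, and a populated `registry` from this package:

```python
async def seed(session):
    # "base" is a plain-string context; Context.TESTING is an enum member —
    # both are normalized to strings before variant filtering.
    results = await load_fixtures_by_context(
        session,
        registry,  # assumed FixtureRegistry from application setup
        "base",
        Context.TESTING,
        strategy=LoadStrategy.SKIP_EXISTING,
    )
    return {name: len(objs) for name, objs in results.items()}
```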
@@ -7,15 +7,15 @@ from .columns import (
     UUIDv7Mixin,
     UpdatedAtMixin,
 )
-from .watched import ModelEvent, WatchedFieldsMixin, watch
+from .watched import EventSession, ModelEvent, listens_for

 __all__ = [
+    "EventSession",
     "ModelEvent",
     "UUIDMixin",
     "UUIDv7Mixin",
     "CreatedAtMixin",
     "UpdatedAtMixin",
     "TimestampMixin",
-    "WatchedFieldsMixin",
-    "watch",
+    "listens_for",
 ]

@@ -6,14 +6,6 @@ from datetime import datetime
 from sqlalchemy import DateTime, Uuid, text
 from sqlalchemy.orm import Mapped, mapped_column

-__all__ = [
-    "UUIDMixin",
-    "UUIDv7Mixin",
-    "CreatedAtMixin",
-    "UpdatedAtMixin",
-    "TimestampMixin",
-]
-

 class UUIDMixin:
     """Mixin that adds a UUID primary key auto-generated by the database."""

@@ -1,11 +1,9 @@
 """Field-change monitoring via SQLAlchemy session events."""

-import asyncio
 import inspect
-import weakref
-from collections.abc import Awaitable
+from collections.abc import Callable
 from enum import Enum
-from typing import Any, TypeVar
+from typing import Any

 from sqlalchemy import event
 from sqlalchemy import inspect as sa_inspect
@@ -14,65 +12,114 @@ from sqlalchemy.orm.attributes import set_committed_value as _sa_set_committed_v

 from ..logger import get_logger

-__all__ = ["ModelEvent", "WatchedFieldsMixin", "watch"]
-
 _logger = get_logger()
-_T = TypeVar("_T")
-_CALLBACK_ERROR_MSG = "WatchedFieldsMixin callback raised an unhandled exception"
-_WATCHED_FIELDS: weakref.WeakKeyDictionary[type, list[str]] = (
-    weakref.WeakKeyDictionary()
-)
-_SESSION_PENDING_NEW = "_ft_pending_new"
-_SESSION_CREATES = "_ft_creates"
-_SESSION_DELETES = "_ft_deletes"
-_SESSION_UPDATES = "_ft_updates"
-_SESSION_SAVEPOINT_DEPTH = "_ft_sp_depth"


 class ModelEvent(str, Enum):
-    """Event types emitted by :class:`WatchedFieldsMixin`."""
+    """Event types dispatched by :class:`EventSession`."""

     CREATE = "create"
     DELETE = "delete"
     UPDATE = "update"


-def watch(*fields: str) -> Any:
-    """Class decorator to filter which fields trigger ``on_update``.
+_CALLBACK_ERROR_MSG = "Event callback raised an unhandled exception"
+_SESSION_CREATES = "_ft_creates"
+_SESSION_DELETES = "_ft_deletes"
+_SESSION_UPDATES = "_ft_updates"
+_DEFERRED_STRATEGY_KEY = (("deferred", True), ("instrument", True))
+_EVENT_HANDLERS: dict[tuple[type, ModelEvent], list[Callable[..., Any]]] = {}
+_WATCHED_MODELS: set[type] = set()
+_WATCHED_CACHE: dict[type, bool] = {}
+_HANDLER_CACHE: dict[tuple[type, ModelEvent], list[Callable[..., Any]]] = {}
+
+
+def _invalidate_caches() -> None:
+    """Clear lookup caches after handler registration."""
+    _WATCHED_CACHE.clear()
+    _HANDLER_CACHE.clear()
+
+
+def listens_for(
+    model_class: type,
+    event_types: list[ModelEvent] | None = None,
+) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
+    """Register a callback for one or more model lifecycle events.

     Args:
-        *fields: One or more field names to watch. At least one name is required.
-
-    Raises:
-        ValueError: If called with no field names.
+        model_class: The SQLAlchemy model class to listen on.
+        event_types: List of :class:`ModelEvent` values to listen for.
+            Defaults to all event types.
     """
-    if not fields:
-        raise ValueError("@watch requires at least one field name.")
+    evs = event_types if event_types is not None else list(ModelEvent)

-    def decorator(cls: type[_T]) -> type[_T]:
-        _WATCHED_FIELDS[cls] = list(fields)
-        return cls
+    def decorator(fn: Callable[..., Any]) -> Callable[..., Any]:
+        for ev in evs:
+            _EVENT_HANDLERS.setdefault((model_class, ev), []).append(fn)
+        _WATCHED_MODELS.add(model_class)
+        _invalidate_caches()
+        return fn

     return decorator


+def _is_watched(obj: Any) -> bool:
+    """Return True if *obj*'s type (or any ancestor) has registered handlers."""
+    cls = type(obj)
+    try:
+        return _WATCHED_CACHE[cls]
+    except KeyError:
+        result = any(klass in _WATCHED_MODELS for klass in cls.__mro__)
+        _WATCHED_CACHE[cls] = result
+        return result
+
+
+def _get_handlers(cls: type, ev: ModelEvent) -> list[Callable[..., Any]]:
+    """Return registered handlers for *cls* and *ev*, walking the MRO."""
+    key = (cls, ev)
+    try:
+        return _HANDLER_CACHE[key]
+    except KeyError:
+        handlers: list[Callable[..., Any]] = []
+        for klass in cls.__mro__:
+            handlers.extend(_EVENT_HANDLERS.get((klass, ev), []))
+        _HANDLER_CACHE[key] = handlers
+        return handlers
+
+
 def _snapshot_column_attrs(obj: Any) -> dict[str, Any]:
     """Read currently-loaded column values into a plain dict."""
     state = sa_inspect(obj)  # InstanceState
     state_dict = state.dict
-    return {
-        prop.key: state_dict[prop.key]
-        for prop in state.mapper.column_attrs
-        if prop.key in state_dict
-    }
+    snapshot: dict[str, Any] = {}
+    for prop in state.mapper.column_attrs:
+        if prop.key in state_dict:
+            snapshot[prop.key] = state_dict[prop.key]
+        elif (  # pragma: no cover
+            not state.expired
+            and prop.strategy_key != _DEFERRED_STRATEGY_KEY
+            and all(
+                col.nullable
+                and col.server_default is None
+                and col.server_onupdate is None
+                for col in prop.columns
+            )
+        ):
+            snapshot[prop.key] = None
+    return snapshot


-def _get_watched_fields(cls: type) -> list[str] | None:
-    """Return the watched fields for *cls*, walking the MRO to inherit from parents."""
-    for klass in cls.__mro__:
-        if klass in _WATCHED_FIELDS:
-            return _WATCHED_FIELDS[klass]
-    return None
+def _get_watched_fields(cls: type) -> tuple[str, ...] | None:
+    """Return the watched fields for *cls*."""
+    fields = getattr(cls, "__watched_fields__", None)
+    if fields is not None and (
+        not isinstance(fields, tuple) or not all(isinstance(f, str) for f in fields)
+    ):
+        raise TypeError(
+            f"{cls.__name__}.__watched_fields__ must be a tuple[str, ...], "
+            f"got {type(fields).__name__}"
+        )
+    return fields


 def _upsert_changes(
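The removed `@watch`/`WatchedFieldsMixin` pattern is replaced by `@listens_for` registration plus an opt-in `__watched_fields__` tuple on the model. A sketch of the new shape — the callback signature `(obj, event_type, changes)` matches `_invoke_callback` above, but the inner structure of `changes` is an assumption here:

```python
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class User(Base):
    __tablename__ = "users"
    __watched_fields__ = ("email",)  # opt-in: only email changes emit UPDATE events

    id: Mapped[int] = mapped_column(primary_key=True)
    email: Mapped[str] = mapped_column()


@listens_for(User, [ModelEvent.UPDATE])  # assumed imports from the mixins module
async def on_user_update(obj, event_type, changes):
    # changes maps field name -> old/new values (exact inner keys assumed)
    print(f"user {obj.id} changed fields: {sorted(changes)}")
```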
@@ -93,50 +140,32 @@ def _upsert_changes(
         pending[key] = (obj, changes)


-@event.listens_for(AsyncSession.sync_session_class, "after_transaction_create")
-def _after_transaction_create(session: Any, transaction: Any) -> None:
-    if transaction.nested:
-        session.info[_SESSION_SAVEPOINT_DEPTH] = (
-            session.info.get(_SESSION_SAVEPOINT_DEPTH, 0) + 1
-        )
-
-
-@event.listens_for(AsyncSession.sync_session_class, "after_transaction_end")
-def _after_transaction_end(session: Any, transaction: Any) -> None:
-    if transaction.nested:
-        depth = session.info.get(_SESSION_SAVEPOINT_DEPTH, 0)
-        if depth > 0:  # pragma: no branch
-            session.info[_SESSION_SAVEPOINT_DEPTH] = depth - 1
-
-
 @event.listens_for(AsyncSession.sync_session_class, "after_flush")
 def _after_flush(session: Any, flush_context: Any) -> None:
-    # New objects: capture references while session.new is still populated.
-    # Values are read in _after_flush_postexec once RETURNING has been processed.
+    # New objects: capture reference. Attributes will be refreshed after commit.
     for obj in session.new:
-        if isinstance(obj, WatchedFieldsMixin):
-            session.info.setdefault(_SESSION_PENDING_NEW, []).append(obj)
+        if _is_watched(obj):
+            session.info.setdefault(_SESSION_CREATES, []).append(obj)

-    # Deleted objects: capture before they leave the identity map.
+    # Deleted objects: snapshot now while attributes are still loaded.
     for obj in session.deleted:
-        if isinstance(obj, WatchedFieldsMixin):
-            session.info.setdefault(_SESSION_DELETES, []).append(obj)
+        if _is_watched(obj):
+            snapshot = _snapshot_column_attrs(obj)
+            session.info.setdefault(_SESSION_DELETES, []).append((obj, snapshot))

     # Dirty objects: read old/new from SQLAlchemy attribute history.
     for obj in session.dirty:
-        if not isinstance(obj, WatchedFieldsMixin):
+        if not _is_watched(obj):
             continue

-        # None = not in dict = watch all fields; list = specific fields only
         watched = _get_watched_fields(type(obj))
         changes: dict[str, dict[str, Any]] = {}

+        inst_attrs = sa_inspect(obj).attrs
         attrs = (
-            # Specific fields
-            ((field, sa_inspect(obj).attrs[field]) for field in watched)
+            ((field, inst_attrs[field]) for field in watched)
             if watched is not None
-            # All mapped fields
-            else ((s.key, s) for s in sa_inspect(obj).attrs)
+            else ((s.key, s) for s in inst_attrs)
         )
         for field, attr_state in attrs:
             history = attr_state.history
@@ -154,116 +183,108 @@ def _after_flush(session: Any, flush_context: Any) -> None:
             )


-@event.listens_for(AsyncSession.sync_session_class, "after_flush_postexec")
-def _after_flush_postexec(session: Any, flush_context: Any) -> None:
-    # New objects are now persistent and RETURNING values have been applied,
-    # so server defaults (id, created_at, …) are available via getattr.
-    pending_new: list[Any] = session.info.pop(_SESSION_PENDING_NEW, [])
-    if not pending_new:
-        return
-    session.info.setdefault(_SESSION_CREATES, []).extend(pending_new)
-
-
 @event.listens_for(AsyncSession.sync_session_class, "after_rollback")
 def _after_rollback(session: Any) -> None:
-    session.info.pop(_SESSION_PENDING_NEW, None)
+    if session.in_transaction():
+        return
     session.info.pop(_SESSION_CREATES, None)
     session.info.pop(_SESSION_DELETES, None)
     session.info.pop(_SESSION_UPDATES, None)


-def _task_error_handler(task: asyncio.Task[Any]) -> None:
-    if not task.cancelled() and (exc := task.exception()):
-        _logger.error(_CALLBACK_ERROR_MSG, exc_info=exc)
-
-
-def _schedule_with_snapshot(
-    loop: asyncio.AbstractEventLoop, obj: Any, fn: Any, *args: Any
+async def _invoke_callback(
+    fn: Callable[..., Any],
+    obj: Any,
+    event_type: ModelEvent,
+    changes: dict[str, dict[str, Any]] | None,
 ) -> None:
-    """Snapshot *obj*'s column attrs now (before expire_on_commit wipes them),
-    then schedule a coroutine that restores the snapshot and calls *fn*.
-    """
-    snapshot = _snapshot_column_attrs(obj)
-
-    async def _run(
-        obj: Any = obj,
-        fn: Any = fn,
+    """Call *fn* and await the result if it is awaitable."""
+    result = fn(obj, event_type, changes)
||||||
snapshot: dict[str, Any] = snapshot,
|
|
||||||
args: tuple = args,
|
|
||||||
) -> None:
|
|
||||||
for key, value in snapshot.items():
|
|
||||||
_sa_set_committed_value(obj, key, value)
|
|
||||||
try:
|
|
||||||
result = fn(*args)
|
|
||||||
if inspect.isawaitable(result):
|
if inspect.isawaitable(result):
|
||||||
await result
|
await result
|
||||||
except Exception as exc:
|
|
||||||
_logger.error(_CALLBACK_ERROR_MSG, exc_info=exc)
|
|
||||||
|
|
||||||
task = loop.create_task(_run())
|
|
||||||
task.add_done_callback(_task_error_handler)
|
|
||||||
|
|
||||||
|
|
||||||
@event.listens_for(AsyncSession.sync_session_class, "after_commit")
|
class EventSession(AsyncSession):
|
||||||
def _after_commit(session: Any) -> None:
|
"""AsyncSession subclass that dispatches lifecycle callbacks after commit."""
|
||||||
if session.info.get(_SESSION_SAVEPOINT_DEPTH, 0) > 0:
|
|
||||||
return
|
|
||||||
|
|
||||||
creates: list[Any] = session.info.pop(_SESSION_CREATES, [])
|
async def commit(self) -> None: # noqa: C901
|
||||||
deletes: list[Any] = session.info.pop(_SESSION_DELETES, [])
|
await super().commit()
|
||||||
field_changes: dict[int, tuple[Any, dict[str, dict[str, Any]]]] = session.info.pop(
|
|
||||||
|
creates: list[Any] = self.info.pop(_SESSION_CREATES, [])
|
||||||
|
deletes: list[tuple[Any, dict[str, Any]]] = self.info.pop(_SESSION_DELETES, [])
|
||||||
|
field_changes: dict[int, tuple[Any, dict[str, dict[str, Any]]]] = self.info.pop(
|
||||||
_SESSION_UPDATES, {}
|
_SESSION_UPDATES, {}
|
||||||
)
|
)
|
||||||
|
|
||||||
if creates and deletes:
|
|
||||||
transient_ids = {id(o) for o in creates} & {id(o) for o in deletes}
|
|
||||||
if transient_ids:
|
|
||||||
creates = [o for o in creates if id(o) not in transient_ids]
|
|
||||||
deletes = [o for o in deletes if id(o) not in transient_ids]
|
|
||||||
field_changes = {
|
|
||||||
k: v for k, v in field_changes.items() if k not in transient_ids
|
|
||||||
}
|
|
||||||
|
|
||||||
if not creates and not deletes and not field_changes:
|
if not creates and not deletes and not field_changes:
|
||||||
return
|
return
|
||||||
|
|
||||||
try:
|
# Suppress transient objects (created + deleted in same transaction).
|
||||||
loop = asyncio.get_running_loop()
|
if creates and deletes:
|
||||||
except RuntimeError:
|
created_ids = {id(o) for o in creates}
|
||||||
return
|
deleted_ids = {id(o) for o, _ in deletes}
|
||||||
|
transient_ids = created_ids & deleted_ids
|
||||||
|
if transient_ids:
|
||||||
|
creates = [o for o in creates if id(o) not in transient_ids]
|
||||||
|
deletes = [(o, s) for o, s in deletes if id(o) not in transient_ids]
|
||||||
|
field_changes = {
|
||||||
|
k: v for k, v in field_changes.items() if k not in transient_ids
|
||||||
|
}
|
||||||
|
|
||||||
|
# Suppress updates for deleted objects (row is gone, refresh would fail).
|
||||||
|
if deletes and field_changes:
|
||||||
|
deleted_ids = {id(o) for o, _ in deletes}
|
||||||
|
field_changes = {
|
||||||
|
k: v for k, v in field_changes.items() if k not in deleted_ids
|
||||||
|
}
|
||||||
|
|
||||||
|
# Suppress updates for newly created objects (CREATE-only semantics).
|
||||||
|
if creates and field_changes:
|
||||||
|
create_ids = {id(o) for o in creates}
|
||||||
|
field_changes = {
|
||||||
|
k: v for k, v in field_changes.items() if k not in create_ids
|
||||||
|
}
|
||||||
|
|
||||||
|
# Dispatch CREATE callbacks.
|
||||||
for obj in creates:
|
for obj in creates:
|
||||||
_schedule_with_snapshot(loop, obj, obj.on_create)
|
try:
|
||||||
|
state = sa_inspect(obj, raiseerr=False)
|
||||||
|
if (
|
||||||
|
state is None or state.detached or state.transient
|
||||||
|
): # pragma: no cover
|
||||||
|
continue
|
||||||
|
await self.refresh(obj)
|
||||||
|
for handler in _get_handlers(type(obj), ModelEvent.CREATE):
|
||||||
|
await _invoke_callback(handler, obj, ModelEvent.CREATE, None)
|
||||||
|
except Exception as exc:
|
||||||
|
_logger.error(_CALLBACK_ERROR_MSG, exc_info=exc)
|
||||||
|
|
||||||
for obj in deletes:
|
# Dispatch DELETE callbacks (restore snapshot; row is gone).
|
||||||
_schedule_with_snapshot(loop, obj, obj.on_delete)
|
for obj, snapshot in deletes:
|
||||||
|
try:
|
||||||
|
for key, value in snapshot.items():
|
||||||
|
_sa_set_committed_value(obj, key, value)
|
||||||
|
for handler in _get_handlers(type(obj), ModelEvent.DELETE):
|
||||||
|
await _invoke_callback(handler, obj, ModelEvent.DELETE, None)
|
||||||
|
except Exception as exc:
|
||||||
|
_logger.error(_CALLBACK_ERROR_MSG, exc_info=exc)
|
||||||
|
|
||||||
|
# Dispatch UPDATE callbacks.
|
||||||
for obj, changes in field_changes.values():
|
for obj, changes in field_changes.values():
|
||||||
_schedule_with_snapshot(loop, obj, obj.on_update, changes)
|
try:
|
||||||
|
state = sa_inspect(obj, raiseerr=False)
|
||||||
|
if (
|
||||||
|
state is None or state.detached or state.transient
|
||||||
|
): # pragma: no cover
|
||||||
|
continue
|
||||||
|
await self.refresh(obj)
|
||||||
|
for handler in _get_handlers(type(obj), ModelEvent.UPDATE):
|
||||||
|
await _invoke_callback(handler, obj, ModelEvent.UPDATE, changes)
|
||||||
|
except Exception as exc:
|
||||||
|
_logger.error(_CALLBACK_ERROR_MSG, exc_info=exc)
|
||||||
|
|
||||||
|
async def rollback(self) -> None:
|
||||||
class WatchedFieldsMixin:
|
await super().rollback()
|
||||||
"""Mixin that enables lifecycle callbacks for SQLAlchemy models."""
|
self.info.pop(_SESSION_CREATES, None)
|
||||||
|
self.info.pop(_SESSION_DELETES, None)
|
||||||
def on_event(
|
self.info.pop(_SESSION_UPDATES, None)
|
||||||
self, event: ModelEvent, changes: dict[str, dict[str, Any]] | None = None
|
|
||||||
) -> Awaitable[None] | None:
|
|
||||||
"""Catch-all callback fired for every lifecycle event.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
event: The event type (:attr:`ModelEvent.CREATE`, :attr:`ModelEvent.DELETE`,
|
|
||||||
or :attr:`ModelEvent.UPDATE`).
|
|
||||||
changes: Field changes for :attr:`ModelEvent.UPDATE`, ``None`` otherwise.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def on_create(self) -> Awaitable[None] | None:
|
|
||||||
"""Called after INSERT commit."""
|
|
||||||
return self.on_event(ModelEvent.CREATE)
|
|
||||||
|
|
||||||
def on_delete(self) -> Awaitable[None] | None:
|
|
||||||
"""Called after DELETE commit."""
|
|
||||||
return self.on_event(ModelEvent.DELETE)
|
|
||||||
|
|
||||||
def on_update(self, changes: dict[str, dict[str, Any]]) -> Awaitable[None] | None:
|
|
||||||
"""Called after UPDATE commit when watched fields change."""
|
|
||||||
return self.on_event(ModelEvent.UPDATE, changes=changes)
|
|
||||||
|
|||||||
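In short, this hunk replaces the fire-and-forget task scheduling (and the mixin's per-model `on_*` hooks) with an `EventSession` that dispatches registered handlers inline, after `commit()` succeeds. A minimal wiring sketch, assuming the import path used by the test helpers below; the DSN and the `publish` handler body are placeholders, and the handler-registration decorator is internal machinery not shown in this hunk:

    from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
    from fastapi_toolsets.models.watched import EventSession  # path inferred from the helper diff below

    engine = create_async_engine("postgresql+asyncpg://localhost/app")  # placeholder DSN
    # class_=EventSession is what turns plain commits into event-dispatching commits.
    session_maker = async_sessionmaker(engine, expire_on_commit=False, class_=EventSession)

    async def publish(session, doc):
        doc.status = "published"
        # CREATE/UPDATE/DELETE handlers run here, only after the commit has succeeded.
        await session.commit()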
@@ -7,6 +7,7 @@ from contextlib import asynccontextmanager
 from typing import Any

 from httpx import ASGITransport, AsyncClient
+from sqlalchemy import text
 from sqlalchemy.engine import make_url
 from sqlalchemy.ext.asyncio import (
     AsyncSession,
@@ -15,13 +16,9 @@ from sqlalchemy.ext.asyncio import (
 )
 from sqlalchemy.orm import DeclarativeBase

-from sqlalchemy import text
-from ..db import (
-    cleanup_tables as _cleanup_tables,
-    create_database,
-    create_db_context,
-)
+from ..db import cleanup_tables as _cleanup_tables
+from ..db import create_database
+from ..models.watched import EventSession


 async def cleanup_tables(
@@ -269,15 +266,14 @@ async def create_db_session(
     async with engine.begin() as conn:
         await conn.run_sync(base.metadata.create_all)

-    # Create session using existing db context utility
-    session_maker = async_sessionmaker(engine, expire_on_commit=expire_on_commit)
-    get_session = create_db_context(session_maker)
+    session_maker = async_sessionmaker(
+        engine, expire_on_commit=expire_on_commit, class_=EventSession
+    )

-    async with get_session() as session:
+    async with session_maker() as session:
         yield session

     if cleanup:
-        await cleanup_tables(session, base)
+        await _cleanup_tables(session=session, base=base)

     if drop_tables:
         async with engine.begin() as conn:
@@ -162,6 +162,7 @@ class PaginatedResponse(BaseResponse, Generic[DataT]):
     pagination: OffsetPagination | CursorPagination
     pagination_type: PaginationType | None = None
     filter_attributes: dict[str, list[Any]] | None = None
+    search_columns: list[str] | None = None

     _discriminated_union_cache: ClassVar[dict[Any, Any]] = {}
src/fastapi_toolsets/security/__init__.py (new file)
@@ -0,0 +1,24 @@
+"""Authentication helpers for FastAPI using Security()."""
+
+from .abc import AuthSource
+from .oauth import (
+    oauth_build_authorization_redirect,
+    oauth_decode_state,
+    oauth_encode_state,
+    oauth_fetch_userinfo,
+    oauth_resolve_provider_urls,
+)
+from .sources import APIKeyHeaderAuth, BearerTokenAuth, CookieAuth, MultiAuth
+
+__all__ = [
+    "APIKeyHeaderAuth",
+    "AuthSource",
+    "BearerTokenAuth",
+    "CookieAuth",
+    "MultiAuth",
+    "oauth_build_authorization_redirect",
+    "oauth_decode_state",
+    "oauth_encode_state",
+    "oauth_fetch_userinfo",
+    "oauth_resolve_provider_urls",
+]
src/fastapi_toolsets/security/abc.py (new file)
@@ -0,0 +1,53 @@
+"""Abstract base class for authentication sources."""
+
+import inspect
+from abc import ABC, abstractmethod
+from typing import Any, Callable
+
+from fastapi import Request
+from fastapi.security import SecurityScopes
+
+from fastapi_toolsets.exceptions import UnauthorizedError
+
+
+def _ensure_async(fn: Callable[..., Any]) -> Callable[..., Any]:
+    """Wrap *fn* so it can always be awaited, caching the coroutine check at init time."""
+    if inspect.iscoroutinefunction(fn):
+        return fn
+
+    async def wrapper(*args: Any, **kwargs: Any) -> Any:
+        return fn(*args, **kwargs)
+
+    return wrapper
+
+
+class AuthSource(ABC):
+    """Abstract base class for authentication sources."""
+
+    def __init__(self) -> None:
+        """Set up the default FastAPI dependency signature."""
+        source = self
+
+        async def _call(
+            request: Request,
+            security_scopes: SecurityScopes,  # noqa: ARG001
+        ) -> Any:
+            credential = await source.extract(request)
+            if credential is None:
+                raise UnauthorizedError()
+            return await source.authenticate(credential)
+
+        self._call_fn: Callable[..., Any] = _call
+        self.__signature__ = inspect.signature(_call)
+
+    @abstractmethod
+    async def extract(self, request: Request) -> str | None:
+        """Extract the raw credential from the request without validating."""
+
+    @abstractmethod
+    async def authenticate(self, credential: str) -> Any:
+        """Validate a credential and return the authenticated identity."""
+
+    async def __call__(self, **kwargs: Any) -> Any:
+        """FastAPI dependency dispatch."""
+        return await self._call_fn(**kwargs)
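A concrete source only has to implement the two abstract methods; the base `__init__` wires the FastAPI dependency plumbing. A minimal sketch of a user-defined source — the `X-Debug-User` header and the `fake_users` dict are illustrative, not part of the library:

    from fastapi import Request
    from fastapi_toolsets.exceptions import UnauthorizedError
    from fastapi_toolsets.security import AuthSource

    fake_users = {"alice": {"id": 1, "name": "alice"}}  # illustrative store

    class DebugHeaderAuth(AuthSource):
        async def extract(self, request: Request) -> str | None:
            # Return the raw credential, or None so other sources (e.g. in MultiAuth) can try.
            return request.headers.get("X-Debug-User")

        async def authenticate(self, credential: str):
            user = fake_users.get(credential)
            if user is None:
                raise UnauthorizedError()
            return user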
src/fastapi_toolsets/security/oauth.py (new file)
@@ -0,0 +1,140 @@
+"""OAuth 2.0 / OIDC helper utilities."""
+
+import base64
+from typing import Any
+from urllib.parse import urlencode
+
+import httpx
+from fastapi.responses import RedirectResponse
+
+_discovery_cache: dict[str, dict] = {}
+
+
+async def oauth_resolve_provider_urls(
+    discovery_url: str,
+) -> tuple[str, str, str | None]:
+    """Fetch the OIDC discovery document and return endpoint URLs.
+
+    Args:
+        discovery_url: URL of the provider's ``/.well-known/openid-configuration``.
+
+    Returns:
+        A ``(authorization_url, token_url, userinfo_url)`` tuple.
+        *userinfo_url* is ``None`` when the provider does not advertise one.
+    """
+    if discovery_url not in _discovery_cache:
+        async with httpx.AsyncClient() as client:
+            resp = await client.get(discovery_url)
+            resp.raise_for_status()
+            _discovery_cache[discovery_url] = resp.json()
+    cfg = _discovery_cache[discovery_url]
+    return (
+        cfg["authorization_endpoint"],
+        cfg["token_endpoint"],
+        cfg.get("userinfo_endpoint"),
+    )
+
+
+async def oauth_fetch_userinfo(
+    *,
+    token_url: str,
+    userinfo_url: str,
+    code: str,
+    client_id: str,
+    client_secret: str,
+    redirect_uri: str,
+) -> dict[str, Any]:
+    """Exchange an authorization code for tokens and return the userinfo payload.
+
+    Performs the two-step OAuth 2.0 / OIDC token exchange:
+
+    1. POSTs the authorization *code* to *token_url* to obtain an access token.
+    2. GETs *userinfo_url* using that access token as a Bearer credential.
+
+    Args:
+        token_url: Provider's token endpoint.
+        userinfo_url: Provider's userinfo endpoint.
+        code: Authorization code received from the provider's callback.
+        client_id: OAuth application client ID.
+        client_secret: OAuth application client secret.
+        redirect_uri: Redirect URI that was used in the authorization request.
+
+    Returns:
+        The JSON payload returned by the userinfo endpoint as a plain ``dict``.
+    """
+    async with httpx.AsyncClient() as client:
+        token_resp = await client.post(
+            token_url,
+            data={
+                "grant_type": "authorization_code",
+                "code": code,
+                "client_id": client_id,
+                "client_secret": client_secret,
+                "redirect_uri": redirect_uri,
+            },
+            headers={"Accept": "application/json"},
+        )
+        token_resp.raise_for_status()
+        access_token = token_resp.json()["access_token"]
+
+        userinfo_resp = await client.get(
+            userinfo_url,
+            headers={"Authorization": f"Bearer {access_token}"},
+        )
+        userinfo_resp.raise_for_status()
+        return userinfo_resp.json()
+
+
+def oauth_build_authorization_redirect(
+    authorization_url: str,
+    *,
+    client_id: str,
+    scopes: str,
+    redirect_uri: str,
+    destination: str,
+) -> RedirectResponse:
+    """Return an OAuth 2.0 authorization ``RedirectResponse``.
+
+    Args:
+        authorization_url: Provider's authorization endpoint.
+        client_id: OAuth application client ID.
+        scopes: Space-separated list of requested scopes.
+        redirect_uri: URI the provider should redirect back to after authorization.
+        destination: URL the user should be sent to after the full OAuth flow
+            completes (encoded as ``state``).
+
+    Returns:
+        A :class:`~fastapi.responses.RedirectResponse` to the provider's
+        authorization page.
+    """
+    params = urlencode(
+        {
+            "client_id": client_id,
+            "response_type": "code",
+            "scope": scopes,
+            "redirect_uri": redirect_uri,
+            "state": oauth_encode_state(destination),
+        }
+    )
+    return RedirectResponse(f"{authorization_url}?{params}")
+
+
+def oauth_encode_state(url: str) -> str:
+    """Base64url-encode a URL to embed as an OAuth ``state`` parameter."""
+    return base64.urlsafe_b64encode(url.encode()).decode()
+
+
+def oauth_decode_state(state: str | None, *, fallback: str) -> str:
+    """Decode a base64url OAuth ``state`` parameter.
+
+    Handles missing padding (some providers strip ``=``).
+    Returns *fallback* if *state* is absent, the literal string ``"null"``,
+    or cannot be decoded.
+    """
+    if not state or state == "null":
+        return fallback
+    try:
+        padded = state + "=" * (4 - len(state) % 4)
+        return base64.urlsafe_b64decode(padded).decode()
+    except Exception:
+        return fallback
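Taken together, these helpers line up into the classic redirect/callback pair. A hedged sketch of the wiring, assuming a login and callback route; the discovery URL, client credentials, and route paths are placeholders, and real code would persist a session instead of echoing the payload:

    from fastapi import FastAPI
    from fastapi_toolsets.security import (
        oauth_build_authorization_redirect,
        oauth_decode_state,
        oauth_fetch_userinfo,
        oauth_resolve_provider_urls,
    )

    app = FastAPI()
    DISCOVERY = "https://accounts.example.com/.well-known/openid-configuration"  # placeholder

    @app.get("/auth/login")
    async def login(next_url: str = "/"):
        auth_url, _token_url, _userinfo_url = await oauth_resolve_provider_urls(DISCOVERY)
        return oauth_build_authorization_redirect(
            auth_url,
            client_id="my-client-id",                             # placeholder
            scopes="openid email profile",
            redirect_uri="https://app.example.com/auth/callback",  # placeholder
            destination=next_url,  # round-tripped through the state parameter
        )

    @app.get("/auth/callback")
    async def callback(code: str, state: str | None = None):
        _auth_url, token_url, userinfo_url = await oauth_resolve_provider_urls(DISCOVERY)
        assert userinfo_url is not None  # provider must advertise a userinfo endpoint
        userinfo = await oauth_fetch_userinfo(
            token_url=token_url,
            userinfo_url=userinfo_url,
            code=code,
            client_id="my-client-id",          # placeholder
            client_secret="my-client-secret",  # placeholder
            redirect_uri="https://app.example.com/auth/callback",
        )
        destination = oauth_decode_state(state, fallback="/")
        # A real app would create a session here, then redirect to `destination`.
        return {"user": userinfo, "next": destination}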
src/fastapi_toolsets/security/sources/__init__.py (new file)
@@ -0,0 +1,8 @@
+"""Built-in authentication source implementations."""
+
+from .header import APIKeyHeaderAuth
+from .bearer import BearerTokenAuth
+from .cookie import CookieAuth
+from .multi import MultiAuth
+
+__all__ = ["APIKeyHeaderAuth", "BearerTokenAuth", "CookieAuth", "MultiAuth"]
src/fastapi_toolsets/security/sources/bearer.py (new file)
@@ -0,0 +1,120 @@
+"""Bearer token authentication source."""
+
+import inspect
+import secrets
+from typing import Annotated, Any, Callable
+
+from fastapi import Depends
+from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer, SecurityScopes
+
+from fastapi_toolsets.exceptions import UnauthorizedError
+
+from ..abc import AuthSource, _ensure_async
+
+
+class BearerTokenAuth(AuthSource):
+    """Bearer token authentication source.
+
+    Wraps :class:`fastapi.security.HTTPBearer` for OpenAPI documentation.
+    The validator is called as ``await validator(credential, **kwargs)``
+    where ``kwargs`` are the extra keyword arguments provided at instantiation.
+
+    Args:
+        validator: Sync or async callable that receives the credential and any
+            extra keyword arguments, and returns the authenticated identity
+            (e.g. a ``User`` model). Should raise
+            :class:`~fastapi_toolsets.exceptions.UnauthorizedError` on failure.
+        prefix: Optional token prefix (e.g. ``"user_"``). If set, only tokens
+            whose value starts with this prefix are matched. The prefix is
+            **kept** in the value passed to the validator — store and compare
+            tokens with their prefix included. Use :meth:`generate_token` to
+            create correctly-prefixed tokens. This enables multiple
+            ``BearerTokenAuth`` instances in the same app (e.g. ``"user_"``
+            for user tokens, ``"org_"`` for org tokens).
+        **kwargs: Extra keyword arguments forwarded to the validator on every
+            call (e.g. ``role=Role.ADMIN``).
+    """
+
+    def __init__(
+        self,
+        validator: Callable[..., Any],
+        *,
+        prefix: str | None = None,
+        **kwargs: Any,
+    ) -> None:
+        self._validator = _ensure_async(validator)
+        self._prefix = prefix
+        self._kwargs = kwargs
+        self._scheme = HTTPBearer(auto_error=False)
+
+        async def _call(
+            security_scopes: SecurityScopes,  # noqa: ARG001
+            credentials: Annotated[
+                HTTPAuthorizationCredentials | None, Depends(self._scheme)
+            ] = None,
+        ) -> Any:
+            if credentials is None:
+                raise UnauthorizedError()
+            return await self._validate(credentials.credentials)
+
+        self._call_fn = _call
+        self.__signature__ = inspect.signature(_call)
+
+    async def _validate(self, token: str) -> Any:
+        """Check prefix and call the validator."""
+        if self._prefix is not None and not token.startswith(self._prefix):
+            raise UnauthorizedError()
+        return await self._validator(token, **self._kwargs)
+
+    async def extract(self, request: Any) -> str | None:
+        """Extract the raw credential from the request without validating.
+
+        Returns ``None`` if no ``Authorization: Bearer`` header is present,
+        the token is empty, or the token does not match the configured prefix.
+        The prefix is included in the returned value.
+        """
+        auth = request.headers.get("Authorization", "")
+        if not auth.startswith("Bearer "):
+            return None
+        token = auth[7:]
+        if not token:
+            return None
+        if self._prefix is not None and not token.startswith(self._prefix):
+            return None
+        return token
+
+    async def authenticate(self, credential: str) -> Any:
+        """Validate a credential and return the identity.
+
+        Calls ``await validator(credential, **kwargs)`` where ``kwargs`` are
+        the extra keyword arguments provided at instantiation.
+        """
+        return await self._validate(credential)
+
+    def require(self, **kwargs: Any) -> "BearerTokenAuth":
+        """Return a new instance with additional (or overriding) validator kwargs."""
+        return BearerTokenAuth(
+            self._validator,
+            prefix=self._prefix,
+            **{**self._kwargs, **kwargs},
+        )
+
+    def generate_token(self, nbytes: int = 32) -> str:
+        """Generate a secure random token for this auth source.
+
+        Returns a URL-safe random token. If a prefix is configured it is
+        prepended — the returned value is what you store in your database
+        and return to the client as-is.
+
+        Args:
+            nbytes: Number of random bytes before base64 encoding. The
+                resulting string is ``ceil(nbytes * 4 / 3)`` characters
+                (43 chars for the default 32 bytes). Defaults to 32.
+
+        Returns:
+            A ready-to-use token string (e.g. ``"user_Xk3..."``).
+        """
+        token = secrets.token_urlsafe(nbytes)
+        if self._prefix is not None:
+            return f"{self._prefix}{token}"
+        return token
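Usage is a validator plus `Security()`, and `generate_token` produces values that round-trip through the prefix check. A minimal sketch; the in-memory `api_tokens` lookup is illustrative, standing in for a database table:

    from fastapi import FastAPI, Security
    from fastapi_toolsets.exceptions import UnauthorizedError
    from fastapi_toolsets.security import BearerTokenAuth

    app = FastAPI()
    api_tokens: dict[str, dict] = {}  # token -> identity, stored with the "user_" prefix included

    async def verify_user(token: str):
        user = api_tokens.get(token)
        if user is None:
            raise UnauthorizedError()
        return user

    user_bearer = BearerTokenAuth(verify_user, prefix="user_")

    # At token-creation time:
    token = user_bearer.generate_token()  # e.g. "user_Xk3..." — store and return as-is
    api_tokens[token] = {"id": 1}

    @app.get("/me")
    async def me(user=Security(user_bearer)):
        return user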
src/fastapi_toolsets/security/sources/cookie.py (new file)
@@ -0,0 +1,139 @@
+"""Cookie-based authentication source."""
+
+import base64
+import hashlib
+import hmac
+import inspect
+import json
+import time
+from typing import Annotated, Any, Callable
+
+from fastapi import Depends, Request, Response
+from fastapi.security import APIKeyCookie, SecurityScopes
+
+from fastapi_toolsets.exceptions import UnauthorizedError
+
+from ..abc import AuthSource, _ensure_async
+
+
+class CookieAuth(AuthSource):
+    """Cookie-based authentication source.
+
+    Wraps :class:`fastapi.security.APIKeyCookie` for OpenAPI documentation.
+    Optionally signs the cookie with HMAC-SHA256 to provide stateless, tamper-
+    proof sessions without any database entry.
+
+    Args:
+        name: Cookie name.
+        validator: Sync or async callable that receives the cookie value
+            (plain, after signature verification when ``secret_key`` is set)
+            and any extra keyword arguments, and returns the authenticated
+            identity.
+        secret_key: When provided, the cookie is HMAC-SHA256 signed.
+            :meth:`set_cookie` embeds an expiry and signs the payload;
+            :meth:`extract` verifies the signature and expiry before handing
+            the plain value to the validator. When ``None`` (default), the raw
+            cookie value is passed to the validator as-is.
+        ttl: Cookie lifetime in seconds (default 24 h). Only used when
+            ``secret_key`` is set.
+        **kwargs: Extra keyword arguments forwarded to the validator on every
+            call (e.g. ``role=Role.ADMIN``).
+    """
+
+    def __init__(
+        self,
+        name: str,
+        validator: Callable[..., Any],
+        *,
+        secret_key: str | None = None,
+        ttl: int = 86400,
+        **kwargs: Any,
+    ) -> None:
+        self._name = name
+        self._validator = _ensure_async(validator)
+        self._secret_key = secret_key
+        self._ttl = ttl
+        self._kwargs = kwargs
+        self._scheme = APIKeyCookie(name=name, auto_error=False)
+
+        async def _call(
+            security_scopes: SecurityScopes,  # noqa: ARG001
+            value: Annotated[str | None, Depends(self._scheme)] = None,
+        ) -> Any:
+            if value is None:
+                raise UnauthorizedError()
+            plain = self._verify(value)
+            return await self._validator(plain, **self._kwargs)
+
+        self._call_fn = _call
+        self.__signature__ = inspect.signature(_call)
+
+    def _hmac(self, data: str) -> str:
+        if self._secret_key is None:
+            raise RuntimeError("_hmac called without secret_key configured")
+        return hmac.new(
+            self._secret_key.encode(), data.encode(), hashlib.sha256
+        ).hexdigest()
+
+    def _sign(self, value: str) -> str:
+        data = base64.urlsafe_b64encode(
+            json.dumps({"v": value, "exp": int(time.time()) + self._ttl}).encode()
+        ).decode()
+        return f"{data}.{self._hmac(data)}"
+
+    def _verify(self, cookie_value: str) -> str:
+        """Return the plain value, verifying HMAC + expiry when signed."""
+        if not self._secret_key:
+            return cookie_value
+
+        try:
+            data, sig = cookie_value.rsplit(".", 1)
+        except ValueError:
+            raise UnauthorizedError()
+
+        if not hmac.compare_digest(self._hmac(data), sig):
+            raise UnauthorizedError()
+
+        try:
+            payload = json.loads(base64.urlsafe_b64decode(data))
+            value: str = payload["v"]
+            exp: int = payload["exp"]
+        except Exception:
+            raise UnauthorizedError()
+
+        if exp < int(time.time()):
+            raise UnauthorizedError()
+
+        return value
+
+    async def extract(self, request: Request) -> str | None:
+        return request.cookies.get(self._name)
+
+    async def authenticate(self, credential: str) -> Any:
+        plain = self._verify(credential)
+        return await self._validator(plain, **self._kwargs)
+
+    def require(self, **kwargs: Any) -> "CookieAuth":
+        """Return a new instance with additional (or overriding) validator kwargs."""
+        return CookieAuth(
+            self._name,
+            self._validator,
+            secret_key=self._secret_key,
+            ttl=self._ttl,
+            **{**self._kwargs, **kwargs},
+        )
+
+    def set_cookie(self, response: Response, value: str) -> None:
+        """Attach the cookie to *response*, signing it when ``secret_key`` is set."""
+        cookie_value = self._sign(value) if self._secret_key else value
+        response.set_cookie(
+            self._name,
+            cookie_value,
+            httponly=True,
+            samesite="lax",
+            max_age=self._ttl,
+        )
+
+    def delete_cookie(self, response: Response) -> None:
+        """Clear the session cookie (logout)."""
+        response.delete_cookie(self._name, httponly=True, samesite="lax")
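With a `secret_key`, the cookie itself carries the signed, expiring payload, so login is just `set_cookie` and no server-side session row is needed. A minimal sketch, assuming the user lookup and secret are placeholders:

    from fastapi import FastAPI, Response, Security
    from fastapi_toolsets.security import CookieAuth

    app = FastAPI()

    async def verify_session(user_id: str):
        # Receives the *plain* value; the HMAC and expiry were already checked by _verify().
        return {"id": user_id}  # placeholder lookup

    session_auth = CookieAuth("session", verify_session, secret_key="change-me", ttl=3600)

    @app.post("/login")
    async def login(response: Response):
        session_auth.set_cookie(response, "42")  # value is signed and given a 1 h expiry
        return {"ok": True}

    @app.post("/logout")
    async def logout(response: Response):
        session_auth.delete_cookie(response)
        return {"ok": True}

    @app.get("/me")
    async def me(user=Security(session_auth)):
        return user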
src/fastapi_toolsets/security/sources/header.py (new file)
@@ -0,0 +1,67 @@
+"""API key header authentication source."""
+
+import inspect
+from typing import Annotated, Any, Callable
+
+from fastapi import Depends, Request
+from fastapi.security import APIKeyHeader, SecurityScopes
+
+from fastapi_toolsets.exceptions import UnauthorizedError
+
+from ..abc import AuthSource, _ensure_async
+
+
+class APIKeyHeaderAuth(AuthSource):
+    """API key header authentication source.
+
+    Wraps :class:`fastapi.security.APIKeyHeader` for OpenAPI documentation.
+    The validator is called as ``await validator(api_key, **kwargs)``
+    where ``kwargs`` are the extra keyword arguments provided at instantiation.
+
+    Args:
+        name: HTTP header name that carries the API key (e.g. ``"X-API-Key"``).
+        validator: Sync or async callable that receives the API key and any
+            extra keyword arguments, and returns the authenticated identity.
+            Should raise :class:`~fastapi_toolsets.exceptions.UnauthorizedError`
+            on failure.
+        **kwargs: Extra keyword arguments forwarded to the validator on every
+            call (e.g. ``role=Role.ADMIN``).
+    """
+
+    def __init__(
+        self,
+        name: str,
+        validator: Callable[..., Any],
+        **kwargs: Any,
+    ) -> None:
+        self._name = name
+        self._validator = _ensure_async(validator)
+        self._kwargs = kwargs
+        self._scheme = APIKeyHeader(name=name, auto_error=False)
+
+        async def _call(
+            security_scopes: SecurityScopes,  # noqa: ARG001
+            api_key: Annotated[str | None, Depends(self._scheme)] = None,
+        ) -> Any:
+            if api_key is None:
+                raise UnauthorizedError()
+            return await self._validator(api_key, **self._kwargs)
+
+        self._call_fn = _call
+        self.__signature__ = inspect.signature(_call)
+
+    async def extract(self, request: Request) -> str | None:
+        """Extract the API key from the configured header."""
+        return request.headers.get(self._name) or None
+
+    async def authenticate(self, credential: str) -> Any:
+        """Validate a credential and return the identity."""
+        return await self._validator(credential, **self._kwargs)
+
+    def require(self, **kwargs: Any) -> "APIKeyHeaderAuth":
+        """Return a new instance with additional (or overriding) validator kwargs."""
+        return APIKeyHeaderAuth(
+            self._name,
+            self._validator,
+            **{**self._kwargs, **kwargs},
+        )
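The header source follows the same validator contract, and kwargs passed via `require` reach the validator on every call. A short sketch; the `Role` enum and the key check are illustrative assumptions:

    from enum import Enum
    from fastapi import FastAPI, Security
    from fastapi_toolsets.exceptions import UnauthorizedError
    from fastapi_toolsets.security import APIKeyHeaderAuth

    class Role(str, Enum):  # illustrative
        ADMIN = "admin"

    async def verify_key(api_key: str, role: Role | None = None):
        if api_key != "expected-key":  # placeholder check
            raise UnauthorizedError()
        return {"key": api_key, "role": role}

    app = FastAPI()
    api_key_auth = APIKeyHeaderAuth("X-API-Key", verify_key)

    @app.get("/admin")
    async def admin(identity=Security(api_key_auth.require(role=Role.ADMIN))):
        return identity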
src/fastapi_toolsets/security/sources/multi.py (new file)
@@ -0,0 +1,119 @@
+"""MultiAuth: combine multiple authentication sources into a single callable."""
+
+import inspect
+from typing import Any, cast
+
+from fastapi import Request
+from fastapi.security import SecurityScopes
+
+from fastapi_toolsets.exceptions import UnauthorizedError
+
+from ..abc import AuthSource
+
+
+class MultiAuth:
+    """Combine multiple authentication sources into a single callable.
+
+    Sources are tried in order; the first one whose
+    :meth:`~AuthSource.extract` returns a non-``None`` credential wins.
+    Its :meth:`~AuthSource.authenticate` is called and the result returned.
+
+    If a credential is found but the validator raises, the exception propagates
+    immediately — the remaining sources are **not** tried. This prevents
+    silent fallthrough on invalid credentials.
+
+    If no source provides a credential,
+    :class:`~fastapi_toolsets.exceptions.UnauthorizedError` is raised.
+
+    The :meth:`~AuthSource.extract` method of each source performs only
+    string matching (no I/O), so prefix-based dispatch is essentially free.
+
+    Any :class:`~AuthSource` subclass — including user-defined ones — can be
+    passed as a source.
+
+    Args:
+        *sources: Auth source instances to try in order.
+
+    Example::
+
+        user_bearer = BearerTokenAuth(verify_user, prefix="user_")
+        org_bearer = BearerTokenAuth(verify_org, prefix="org_")
+        cookie = CookieAuth("session", verify_session)
+
+        multi = MultiAuth(user_bearer, org_bearer, cookie)
+
+        @app.get("/data")
+        async def data_route(user = Security(multi)):
+            return user
+
+        # Apply a shared requirement to all sources at once
+        @app.get("/admin")
+        async def admin_route(user = Security(multi.require(role=Role.ADMIN))):
+            return user
+    """
+
+    def __init__(self, *sources: AuthSource) -> None:
+        self._sources = sources
+
+        async def _call(
+            request: Request,
+            security_scopes: SecurityScopes,  # noqa: ARG001
+            **kwargs: Any,  # noqa: ARG001 — absorbs scheme values injected by FastAPI
+        ) -> Any:
+            for source in self._sources:
+                credential = await source.extract(request)
+                if credential is not None:
+                    return await source.authenticate(credential)
+            raise UnauthorizedError()
+
+        self._call_fn = _call
+
+        # Build a merged signature that includes the security-scheme Depends()
+        # parameters from every source so FastAPI registers them in OpenAPI docs.
+        seen: set[str] = {"request", "security_scopes"}
+        merged: list[inspect.Parameter] = [
+            inspect.Parameter(
+                "request",
+                inspect.Parameter.POSITIONAL_OR_KEYWORD,
+                annotation=Request,
+            ),
+            inspect.Parameter(
+                "security_scopes",
+                inspect.Parameter.POSITIONAL_OR_KEYWORD,
+                annotation=SecurityScopes,
+            ),
+        ]
+        for i, source in enumerate(sources):
+            for name, param in inspect.signature(source).parameters.items():
+                if name in seen:
+                    continue
+                merged.append(param.replace(name=f"_s{i}_{name}"))
+                seen.add(name)
+        self.__signature__ = inspect.Signature(merged, return_annotation=Any)
+
+    async def __call__(self, **kwargs: Any) -> Any:
+        return await self._call_fn(**kwargs)
+
+    def require(self, **kwargs: Any) -> "MultiAuth":
+        """Return a new :class:`MultiAuth` with kwargs forwarded to each source.
+
+        Calls ``.require(**kwargs)`` on every source that supports it. Sources
+        that do not implement ``.require()`` (e.g. custom :class:`~AuthSource`
+        subclasses) are passed through unchanged.
+
+        New kwargs are merged over each source's existing kwargs — new values
+        win on conflict::
+
+            multi = MultiAuth(bearer, cookie)
+
+            @app.get("/admin")
+            async def admin(user = Security(multi.require(role=Role.ADMIN))):
+                return user
+        """
+        new_sources = tuple(
+            cast(Any, source).require(**kwargs)
+            if hasattr(source, "require")
+            else source
+            for source in self._sources
+        )
+        return MultiAuth(*new_sources)
@@ -14,11 +14,13 @@ from sqlalchemy import (
     DateTime,
     ForeignKey,
     Integer,
+    JSON,
     Numeric,
     String,
     Table,
     Uuid,
 )
+from sqlalchemy.dialects.postgresql import ARRAY
 from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
 from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship

@@ -57,6 +59,7 @@ class User(Base):
     username: Mapped[str] = mapped_column(String(50), unique=True)
     email: Mapped[str] = mapped_column(String(100), unique=True)
     is_active: Mapped[bool] = mapped_column(default=True)
+    notes: Mapped[str | None]
     role_id: Mapped[uuid.UUID | None] = mapped_column(
         ForeignKey("roles.id"), nullable=True
     )
@@ -136,6 +139,17 @@ class Post(Base):
     tags: Mapped[list[Tag]] = relationship(secondary=post_tags)


+class Article(Base):
+    """Test article model with ARRAY and JSON columns."""
+
+    __tablename__ = "articles"
+
+    id: Mapped[uuid.UUID] = mapped_column(Uuid, primary_key=True, default=uuid.uuid4)
+    title: Mapped[str] = mapped_column(String(200))
+    labels: Mapped[list[str]] = mapped_column(ARRAY(String))
+    metadata_: Mapped[dict | None] = mapped_column("metadata", JSON, nullable=True)
+
+
 class RoleCreate(BaseModel):
     """Schema for creating a role."""

@@ -270,6 +284,23 @@ class ProductCreate(BaseModel):
     price: decimal.Decimal


+class ArticleCreate(BaseModel):
+    """Schema for creating an article."""
+
+    id: uuid.UUID | None = None
+    title: str
+    labels: list[str] = []
+
+
+class ArticleRead(PydanticBase):
+    """Schema for reading an article."""
+
+    id: uuid.UUID
+    title: str
+    labels: list[str]
+
+
+ArticleCrud = CrudFactory(Article)
 RoleCrud = CrudFactory(Role)
 RoleCursorCrud = CrudFactory(Role, cursor_column=Role.id)
 IntRoleCursorCrud = CrudFactory(IntRole, cursor_column=IntRole.id)
@@ -211,6 +211,38 @@ class TestResolveLoadOptions:
         assert crud._resolve_load_options([]) == []


+class TestResolveSearchColumns:
+    """Tests for _resolve_search_columns logic."""
+
+    def test_returns_none_when_no_searchable_fields(self):
+        """Returns None when cls.searchable_fields is None and no search_fields passed."""
+
+        class AbstractCrud(AsyncCrud[User]):
+            pass
+
+        assert AbstractCrud._resolve_search_columns(None) is None
+
+    def test_returns_none_when_empty_search_fields_passed(self):
+        """Returns None when an empty list is passed explicitly."""
+        crud = CrudFactory(User)
+        assert crud._resolve_search_columns([]) is None
+
+    def test_returns_keys_from_class_searchable_fields(self):
+        """Returns column keys from cls.searchable_fields when no override passed."""
+        crud = CrudFactory(User, searchable_fields=[User.username])
+        result = crud._resolve_search_columns(None)
+        assert result is not None
+        assert "username" in result
+
+    def test_search_fields_override_takes_priority(self):
+        """Explicit search_fields override cls.searchable_fields."""
+        crud = CrudFactory(User, searchable_fields=[User.username])
+        result = crud._resolve_search_columns([User.email])
+        assert result is not None
+        assert "email" in result
+        assert "username" not in result
+
+
 class TestDefaultLoadOptionsIntegration:
     """Integration tests for default_load_options with real DB queries."""
File diff suppressed because it is too large.
@@ -97,7 +97,7 @@ class TestAppSessionDep:
         gen = get_db()
         session = await gen.__anext__()
         assert isinstance(session, AsyncSession)
-        await session.close()
+        await gen.aclose()


 class TestOffsetPagination:
@@ -182,8 +182,7 @@ class TestOffsetPagination:
         body = resp.json()
         fa = body["filter_attributes"]
         assert set(fa["status"]) == {"draft", "published"}
-        # "name" is unique across all facet fields — no prefix needed
-        assert set(fa["name"]) == {"backend", "python"}
+        assert set(fa["category__name"]) == {"backend", "python"}

     @pytest.mark.anyio
     async def test_filter_attributes_scoped_to_filter(
@@ -1,6 +1,7 @@
 """Tests for fastapi_toolsets.fixtures module."""

 import uuid
+from enum import Enum

 import pytest
 from sqlalchemy.ext.asyncio import AsyncSession
@@ -13,10 +14,22 @@ from fastapi_toolsets.fixtures import (
     load_fixtures,
     load_fixtures_by_context,
 )
-from fastapi_toolsets.fixtures.utils import _get_primary_key
+from fastapi_toolsets.fixtures.utils import _get_primary_key, _instance_to_dict

-from .conftest import IntRole, Permission, Role, RoleCrud, User, UserCrud
+from .conftest import IntRole, Permission, Role, RoleCreate, RoleCrud, User, UserCrud
+
+
+class AppContext(str, Enum):
+    """Example user-defined str+Enum context."""
+
+    STAGING = "staging"
+    DEMO = "demo"
+
+
+class PlainEnumContext(Enum):
+    """Example user-defined plain Enum context (no str mixin)."""
+
+    STAGING = "staging"


 class TestContext:
@@ -39,6 +52,86 @@ class TestContext:
         assert Context.TESTING.value == "testing"


+class TestCustomEnumContext:
+    """Custom Enum types are accepted wherever Context/str are expected."""
+
+    def test_cannot_subclass_context_with_members(self):
+        """Python prohibits extending an Enum that already has members."""
+        with pytest.raises(TypeError):
+
+            class MyContext(Context):  # noqa: F841 # ty: ignore[subclass-of-final-class]
+                STAGING = "staging"
+
+    def test_custom_enum_values_interchangeable_with_context(self):
+        """A custom enum with the same .value as a built-in Context member is
+        treated as the same context — fixtures registered under one are found
+        by the other."""
+
+        class AppContextFull(str, Enum):
+            BASE = "base"
+            STAGING = "staging"
+
+        registry = FixtureRegistry()
+
+        @registry.register(contexts=[Context.BASE])
+        def roles():
+            return []
+
+        # AppContextFull.BASE has value "base" — same as Context.BASE
+        fixtures = registry.get_by_context(AppContextFull.BASE)
+        assert len(fixtures) == 1
+
+    def test_custom_enum_registry_default_contexts(self):
+        """FixtureRegistry(contexts=[...]) accepts a custom Enum."""
+        registry = FixtureRegistry(contexts=[AppContext.STAGING])
+
+        @registry.register
+        def data():
+            return []
+
+        fixture = registry.get("data")
+        assert fixture.contexts == ["staging"]
+
+    def test_custom_enum_resolve_context_dependencies(self):
+        """resolve_context_dependencies accepts a custom Enum context."""
+        registry = FixtureRegistry()
+
+        @registry.register(contexts=[AppContext.STAGING])
+        def staging_roles():
+            return []
+
+        order = registry.resolve_context_dependencies(AppContext.STAGING)
+        assert "staging_roles" in order
+
+    @pytest.mark.anyio
+    async def test_custom_enum_e2e(self, db_session: AsyncSession):
+        """End-to-end: register with custom Enum, load with the same Enum."""
+        registry = FixtureRegistry()
+
+        @registry.register(contexts=[AppContext.STAGING])
+        def staging_roles():
+            return [Role(id=uuid.uuid4(), name="staging-admin")]
+
+        result = await load_fixtures_by_context(
+            db_session, registry, AppContext.STAGING
+        )
+        assert len(result["staging_roles"]) == 1
+
+    @pytest.mark.anyio
+    async def test_plain_enum_e2e(self, db_session: AsyncSession):
+        """End-to-end: register with plain Enum, load with the same Enum."""
+        registry = FixtureRegistry()
+
+        @registry.register(contexts=[PlainEnumContext.STAGING])
+        def staging_roles():
+            return [Role(id=uuid.uuid4(), name="plain-staging-admin")]
+
+        result = await load_fixtures_by_context(
+            db_session, registry, PlainEnumContext.STAGING
+        )
+        assert len(result["staging_roles"]) == 1
+
+
 class TestLoadStrategy:
     """Tests for LoadStrategy enum."""

|
|||||||
with pytest.raises(ValueError, match="Circular dependency"):
|
with pytest.raises(ValueError, match="Circular dependency"):
|
||||||
registry.resolve_dependencies("a")
|
registry.resolve_dependencies("a")
|
||||||
|
|
||||||
|
def test_resolve_raises_for_unknown_dependency(self):
|
||||||
|
"""KeyError when depends_on references an unregistered fixture."""
|
||||||
|
registry = FixtureRegistry()
|
||||||
|
|
||||||
|
@registry.register(depends_on=["ghost"])
|
||||||
|
def users():
|
||||||
|
return []
|
||||||
|
|
||||||
|
with pytest.raises(KeyError, match="ghost"):
|
||||||
|
registry.resolve_dependencies("users")
|
||||||
|
|
||||||
|
def test_resolve_deduplicates_shared_depends_on_across_variants(self):
|
||||||
|
"""A dep shared by two same-name variants appears only once in the order."""
|
||||||
|
registry = FixtureRegistry()
|
||||||
|
|
||||||
|
@registry.register(contexts=[Context.BASE])
|
||||||
|
def roles():
|
||||||
|
return []
|
||||||
|
|
||||||
|
@registry.register(depends_on=["roles"], contexts=[Context.BASE])
|
||||||
|
def items():
|
||||||
|
return []
|
||||||
|
|
||||||
|
@registry.register(depends_on=["roles"], contexts=[Context.TESTING])
|
||||||
|
def items(): # noqa: F811
|
||||||
|
return []
|
||||||
|
|
||||||
|
order = registry.resolve_dependencies("items")
|
||||||
|
assert order.count("roles") == 1
|
||||||
|
assert order.index("roles") < order.index("items")
|
||||||
|
|
||||||
def test_resolve_context_dependencies(self):
|
def test_resolve_context_dependencies(self):
|
||||||
"""Resolve all fixtures for a context with dependencies."""
|
"""Resolve all fixtures for a context with dependencies."""
|
||||||
registry = FixtureRegistry()
|
registry = FixtureRegistry()
|
||||||
@@ -496,6 +620,52 @@ class TestLoadFixtures:
         count = await RoleCrud.count(db_session)
         assert count == 1

+    @pytest.mark.anyio
+    async def test_merge_does_not_overwrite_omitted_nullable_columns(
+        self, db_session: AsyncSession
+    ):
+        """MERGE must not clear nullable columns that the fixture didn't set.
+
+        When a fixture omits a nullable column (e.g. role_id or notes), a re-merge
+        must leave the existing DB value untouched — not overwrite it with NULL.
+        """
+        registry = FixtureRegistry()
+        admin = await RoleCrud.create(db_session, RoleCreate(name="admin"))
+        uid = uuid.uuid4()
+
+        # First load: user has role_id and notes set
+        @registry.register
+        def users():
+            return [
+                User(
+                    id=uid,
+                    username="alice",
+                    email="a@test.com",
+                    role_id=admin.id,
+                    notes="original",
+                )
+            ]
+
+        await load_fixtures(db_session, registry, "users", strategy=LoadStrategy.MERGE)
+
+        # Second load: fixture omits role_id and notes
+        registry2 = FixtureRegistry()
+
+        @registry2.register
+        def users():  # noqa: F811
+            return [User(id=uid, username="alice-updated", email="a@test.com")]
+
+        await load_fixtures(db_session, registry2, "users", strategy=LoadStrategy.MERGE)
+
+        from sqlalchemy import select
+
+        row = (
+            await db_session.execute(select(User).where(User.id == uid))
+        ).scalar_one()
+        assert row.username == "alice-updated"  # updated column changed
+        assert row.role_id == admin.id  # omitted → preserved
+        assert row.notes == "original"  # omitted → preserved
+
     @pytest.mark.anyio
     async def test_load_with_skip_existing_strategy(self, db_session: AsyncSession):
         """Load fixtures with SKIP_EXISTING strategy."""
@@ -795,3 +965,511 @@ class TestGetPrimaryKey:
        instance = Permission(subject="post")  # action is None
        pk = _get_primary_key(instance)
        assert pk is None


class TestRegistryGetVariants:
    """Tests for FixtureRegistry.get and get_variants edge cases."""

    def test_get_raises_value_error_for_multi_variant(self):
        """get() raises ValueError when the fixture has multiple context variants."""
        registry = FixtureRegistry()

        @registry.register(contexts=[Context.BASE])
        def items():
            return []

        @registry.register(contexts=[Context.TESTING])
        def items():  # noqa: F811
            return []

        with pytest.raises(ValueError, match="get_variants"):
            registry.get("items")

    def test_get_variants_raises_key_error_for_unknown(self):
        """get_variants() raises KeyError for an unregistered name."""
        registry = FixtureRegistry()
        with pytest.raises(KeyError, match="not found"):
            registry.get_variants("no_such_fixture")


class TestInstanceToDict:
    """Unit tests for the _instance_to_dict helper."""

    def test_explicit_values_included(self):
        """All explicitly set column values appear in the result."""
        role_id = uuid.uuid4()
        instance = Role(id=role_id, name="admin")
        d = _instance_to_dict(instance)
        assert d["id"] == role_id
        assert d["name"] == "admin"

    def test_callable_default_none_excluded(self):
        """A column whose value is None but has a callable Python-side default
        (e.g. ``default=uuid.uuid4``) is excluded so the DB generates it."""
        instance = Role(id=None, name="admin")
        d = _instance_to_dict(instance)
        assert "id" not in d
        assert d["name"] == "admin"

    def test_autoincrement_none_excluded(self):
        """A column whose value is None but has autoincrement=True is excluded
        so the DB generates the value via its sequence."""
        instance = IntRole(id=None, name="admin")
        d = _instance_to_dict(instance)
        assert "id" not in d
        assert d["name"] == "admin"

    def test_nullable_none_included(self):
        """None on a nullable column with no default is kept (explicit NULL)."""
        instance = User(id=uuid.uuid4(), username="u", email="e@e.com", role_id=None)
        d = _instance_to_dict(instance)
        assert "role_id" in d
        assert d["role_id"] is None

    def test_nullable_str_no_default_omitted_not_in_dict(self):
        """Mapped[str | None] with no default, not provided in constructor, is absent from dict."""
        instance = User(id=uuid.uuid4(), username="u", email="e@e.com")
        d = _instance_to_dict(instance)
        assert "notes" not in d

    def test_nullable_str_no_default_explicit_none_included(self):
        """Mapped[str | None] with no default, explicitly set to None, is included as NULL."""
        instance = User(id=uuid.uuid4(), username="u", email="e@e.com", notes=None)
        d = _instance_to_dict(instance)
        assert "notes" in d
        assert d["notes"] is None

    def test_nullable_str_no_default_with_value_included(self):
        """Mapped[str | None] with no default and a value set is included normally."""
        instance = User(id=uuid.uuid4(), username="u", email="e@e.com", notes="hello")
        d = _instance_to_dict(instance)
        assert d["notes"] == "hello"

    @pytest.mark.anyio
    async def test_nullable_str_no_default_insert_roundtrip(
        self, db_session: AsyncSession
    ):
        """Fixture loading works for models with Mapped[str | None] (no default).

        Both the omitted-value (→ NULL) and explicit-None paths must insert without error.
        """
        registry = FixtureRegistry()

        uid_a = uuid.uuid4()
        uid_b = uuid.uuid4()
        uid_c = uuid.uuid4()

        @registry.register
        def users():
            return [
                User(
                    id=uid_a, username="no_notes", email="a@test.com"
                ),  # notes omitted
                User(
                    id=uid_b, username="null_notes", email="b@test.com", notes=None
                ),  # explicit None
                User(
                    id=uid_c, username="has_notes", email="c@test.com", notes="hi"
                ),  # value set
            ]

        result = await load_fixtures(db_session, registry, "users")

        from sqlalchemy import select

        rows = (
            (await db_session.execute(select(User).order_by(User.username)))
            .scalars()
            .all()
        )
        by_username = {r.username: r for r in rows}

        assert by_username["no_notes"].notes is None
        assert by_username["null_notes"].notes is None
        assert by_username["has_notes"].notes == "hi"
        assert len(result["users"]) == 3

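Aside on the helper under test: the rules these assertions pin down can be stated compactly. Below is a minimal sketch, assuming SQLAlchemy 2.x's inspection API; `instance_to_dict_sketch` is illustrative only and is not the library's actual `_instance_to_dict`.

from sqlalchemy import inspect


def instance_to_dict_sketch(instance) -> dict:
    """Collect explicitly set column values; let the DB fill the rest."""
    result = {}
    for column in inspect(instance).mapper.columns:
        if column.key not in instance.__dict__:
            continue  # attribute never set on this instance -> omit entirely
        value = getattr(instance, column.key)
        has_callable_default = column.default is not None and column.default.is_callable
        if value is None and (has_callable_default or column.autoincrement is True):
            continue  # let default=uuid.uuid4 or the DB sequence generate it
        result[column.key] = value  # keeps explicit None on plain nullable columns
    return result
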
class TestBatchMergeNonPkColumns:
    """Batch MERGE on a model with no non-PK columns (PK-only table)."""

    @pytest.mark.anyio
    async def test_merge_pk_only_model(self, db_session: AsyncSession):
        """MERGE strategy on a PK-only model uses on_conflict_do_nothing."""
        registry = FixtureRegistry()

        @registry.register
        def permissions():
            return [
                Permission(subject="post", action="read"),
                Permission(subject="post", action="write"),
            ]

        result = await load_fixtures(
            db_session, registry, "permissions", strategy=LoadStrategy.MERGE
        )
        assert len(result["permissions"]) == 2

        # Run again — conflicts are silently ignored.
        result2 = await load_fixtures(
            db_session, registry, "permissions", strategy=LoadStrategy.MERGE
        )
        assert len(result2["permissions"]) == 2

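Aside: with every column in the primary key there is nothing for MERGE to SET, so the upsert degenerates to insert-or-ignore. A sketch of that shape on PostgreSQL (assuming SQLAlchemy's dialect-specific insert; the loader's real statement construction may differ):

from sqlalchemy.dialects.postgresql import insert as pg_insert


async def merge_pk_only_sketch(session, table, rows: list[dict]) -> None:
    # No non-PK columns means no SET clause to build; re-loading the same
    # fixture must simply skip the duplicate keys.
    stmt = pg_insert(table).values(rows).on_conflict_do_nothing(
        index_elements=[c.name for c in table.primary_key.columns]
    )
    await session.execute(stmt)
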
class TestBatchNullableColumnEdgeCases:
    """Deep tests for nullable column handling during batch import."""

    @pytest.mark.anyio
    async def test_insert_batch_mixed_nullable_fk(self, db_session: AsyncSession):
        """INSERT batch where some rows set a nullable FK and others don't.

        After normalization the omitted role_id becomes None. For INSERT this
        is acceptable — both rows should insert successfully with the correct
        values (one with FK, one with NULL).
        """
        registry = FixtureRegistry()
        admin = await RoleCrud.create(db_session, RoleCreate(name="admin"))
        uid1 = uuid.uuid4()
        uid2 = uuid.uuid4()

        @registry.register
        def users():
            return [
                User(
                    id=uid1, username="with_role", email="a@test.com", role_id=admin.id
                ),
                User(id=uid2, username="no_role", email="b@test.com"),
            ]

        await load_fixtures(db_session, registry, "users", strategy=LoadStrategy.INSERT)

        from sqlalchemy import select

        rows = {
            r.username: r
            for r in (await db_session.execute(select(User))).scalars().all()
        }
        assert rows["with_role"].role_id == admin.id
        assert rows["no_role"].role_id is None

    @pytest.mark.anyio
    async def test_insert_batch_mixed_nullable_notes(self, db_session: AsyncSession):
        """INSERT batch where some rows have notes and others don't.

        Ensures normalization doesn't break the insert and that each row gets
        the intended value.
        """
        registry = FixtureRegistry()
        uid1 = uuid.uuid4()
        uid2 = uuid.uuid4()
        uid3 = uuid.uuid4()

        @registry.register
        def users():
            return [
                User(
                    id=uid1,
                    username="has_notes",
                    email="a@test.com",
                    notes="important",
                ),
                User(id=uid2, username="no_notes", email="b@test.com"),
                User(id=uid3, username="null_notes", email="c@test.com", notes=None),
            ]

        await load_fixtures(db_session, registry, "users", strategy=LoadStrategy.INSERT)

        from sqlalchemy import select

        rows = {
            r.username: r
            for r in (await db_session.execute(select(User))).scalars().all()
        }
        assert rows["has_notes"].notes == "important"
        assert rows["no_notes"].notes is None
        assert rows["null_notes"].notes is None

    @pytest.mark.anyio
    async def test_merge_batch_mixed_nullable_does_not_overwrite(
        self, db_session: AsyncSession
    ):
        """MERGE batch where one row sets a nullable column and another omits it.

        If both rows already exist in DB, the row that omits the column must
        NOT have its existing value overwritten with NULL.

        This is the core normalization bug: _normalize_rows fills missing keys
        with None, and then MERGE's SET clause includes that column for ALL rows.
        """
        from sqlalchemy import select

        admin = await RoleCrud.create(db_session, RoleCreate(name="admin"))
        uid1 = uuid.uuid4()
        uid2 = uuid.uuid4()

        # Pre-populate: both users have role_id and notes
        registry_initial = FixtureRegistry()

        @registry_initial.register
        def users():
            return [
                User(
                    id=uid1,
                    username="alice",
                    email="a@test.com",
                    role_id=admin.id,
                    notes="alice notes",
                ),
                User(
                    id=uid2,
                    username="bob",
                    email="b@test.com",
                    role_id=admin.id,
                    notes="bob notes",
                ),
            ]

        await load_fixtures(
            db_session, registry_initial, "users", strategy=LoadStrategy.INSERT
        )

        # Re-merge: alice updates notes, bob omits notes entirely
        registry_merge = FixtureRegistry()

        @registry_merge.register
        def users():  # noqa: F811
            return [
                User(
                    id=uid1,
                    username="alice",
                    email="a@test.com",
                    role_id=admin.id,
                    notes="updated",
                ),
                User(
                    id=uid2,
                    username="bob",
                    email="b@test.com",
                    role_id=admin.id,
                ),  # notes omitted
            ]

        await load_fixtures(
            db_session, registry_merge, "users", strategy=LoadStrategy.MERGE
        )

        rows = {
            r.username: r
            for r in (await db_session.execute(select(User))).scalars().all()
        }
        assert rows["alice"].notes == "updated"
        # Bob's notes must be preserved, NOT overwritten with NULL
        assert rows["bob"].notes == "bob notes"

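Aside: one way out of the normalization trap named in the docstring is to stop padding rows to a common key set and instead group them by the exact columns they provide, emitting one upsert per group. A hedged sketch (hypothetical helper on the PostgreSQL dialect, not the shipped fix):

from collections import defaultdict

from sqlalchemy.dialects.postgresql import insert as pg_insert


async def merge_by_column_groups(session, table, rows: list[dict]) -> None:
    # Each statement only SETs columns that every row in its group supplied,
    # so an omitted nullable column can never be overwritten with NULL.
    pk = {c.name for c in table.primary_key.columns}
    groups: dict[frozenset, list[dict]] = defaultdict(list)
    for row in rows:
        groups[frozenset(row)].append(row)
    for provided, group in groups.items():
        stmt = pg_insert(table).values(group)
        to_update = {name: stmt.excluded[name] for name in provided - pk}
        if to_update:
            stmt = stmt.on_conflict_do_update(
                index_elements=sorted(pk), set_=to_update
            )
        else:  # PK-only rows: nothing to update, just ignore duplicates
            stmt = stmt.on_conflict_do_nothing(index_elements=sorted(pk))
        await session.execute(stmt)
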
    @pytest.mark.anyio
    async def test_merge_batch_mixed_nullable_fk_preserves_existing(
        self, db_session: AsyncSession
    ):
        """MERGE batch where one row sets role_id and another omits it.

        The row that omits role_id must keep its existing DB value.
        """
        from sqlalchemy import select

        admin = await RoleCrud.create(db_session, RoleCreate(name="admin"))
        editor = await RoleCrud.create(db_session, RoleCreate(name="editor"))
        uid1 = uuid.uuid4()
        uid2 = uuid.uuid4()

        # Pre-populate
        registry_initial = FixtureRegistry()

        @registry_initial.register
        def users():
            return [
                User(
                    id=uid1,
                    username="alice",
                    email="a@test.com",
                    role_id=admin.id,
                ),
                User(
                    id=uid2,
                    username="bob",
                    email="b@test.com",
                    role_id=editor.id,
                ),
            ]

        await load_fixtures(
            db_session, registry_initial, "users", strategy=LoadStrategy.INSERT
        )

        # Re-merge: alice changes role, bob omits role_id
        registry_merge = FixtureRegistry()

        @registry_merge.register
        def users():  # noqa: F811
            return [
                User(
                    id=uid1,
                    username="alice",
                    email="a@test.com",
                    role_id=editor.id,
                ),
                User(id=uid2, username="bob", email="b@test.com"),  # role_id omitted
            ]

        await load_fixtures(
            db_session, registry_merge, "users", strategy=LoadStrategy.MERGE
        )

        rows = {
            r.username: r
            for r in (await db_session.execute(select(User))).scalars().all()
        }
        assert rows["alice"].role_id == editor.id  # updated
        assert rows["bob"].role_id == editor.id  # must be preserved, NOT NULL

    @pytest.mark.anyio
    async def test_insert_batch_mixed_pk_presence(self, db_session: AsyncSession):
        """INSERT batch where some rows have explicit PK and others rely on
        the callable default (uuid.uuid4).

        Normalization adds the PK key with None to rows that omitted it,
        which can cause NOT NULL violations on the PK column.
        """
        registry = FixtureRegistry()
        explicit_id = uuid.uuid4()

        @registry.register
        def roles():
            return [
                Role(id=explicit_id, name="admin"),
                Role(name="user"),  # PK omitted, relies on default=uuid.uuid4
            ]

        await load_fixtures(db_session, registry, "roles", strategy=LoadStrategy.INSERT)

        from sqlalchemy import select

        rows = (await db_session.execute(select(Role))).scalars().all()
        assert len(rows) == 2
        names = {r.name for r in rows}
        assert names == {"admin", "user"}
        # The "admin" row must have the explicit ID
        admin = next(r for r in rows if r.name == "admin")
        assert admin.id == explicit_id
        # The "user" row must have a generated UUID (not None)
        user = next(r for r in rows if r.name == "user")
        assert user.id is not None

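Aside: the failure mode named here (a padded None primary key) also disappears if the Python-side callable default is applied client-side before the batch is built. A sketch, assuming SQLAlchemy's ColumnDefault API; `apply_callable_defaults` is a hypothetical helper, not the library's code:

def apply_callable_defaults(table, row: dict) -> dict:
    """Fill omitted columns from callable Python-side defaults such as
    default=uuid.uuid4, instead of padding them with None."""
    for column in table.columns:
        if (
            column.name not in row
            and column.default is not None
            and column.default.is_callable
        ):
            # SQLAlchemy wraps zero-argument defaults so they accept an
            # execution context, which uuid4-style defaults ignore.
            row[column.name] = column.default.arg(None)
    return row
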
    @pytest.mark.anyio
    async def test_skip_existing_batch_mixed_nullable(self, db_session: AsyncSession):
        """SKIP_EXISTING with mixed nullable columns inserts correctly.

        Only new rows are inserted; existing rows are untouched regardless of
        which columns the fixture provides.
        """
        from sqlalchemy import select

        admin = await RoleCrud.create(db_session, RoleCreate(name="admin"))
        uid1 = uuid.uuid4()
        uid2 = uuid.uuid4()

        # Pre-populate uid1 with notes
        registry_initial = FixtureRegistry()

        @registry_initial.register
        def users():
            return [
                User(
                    id=uid1,
                    username="alice",
                    email="a@test.com",
                    role_id=admin.id,
                    notes="keep me",
                ),
            ]

        await load_fixtures(
            db_session, registry_initial, "users", strategy=LoadStrategy.INSERT
        )

        # Load again with SKIP_EXISTING: uid1 already exists, uid2 is new
        registry_skip = FixtureRegistry()

        @registry_skip.register
        def users():  # noqa: F811
            return [
                User(id=uid1, username="alice-updated", email="a@test.com"),  # exists
                User(
                    id=uid2,
                    username="bob",
                    email="b@test.com",
                    notes="new user",
                ),  # new
            ]

        result = await load_fixtures(
            db_session, registry_skip, "users", strategy=LoadStrategy.SKIP_EXISTING
        )
        assert len(result["users"]) == 1  # only bob inserted

        rows = {
            r.username: r
            for r in (await db_session.execute(select(User))).scalars().all()
        }
        # alice untouched
        assert rows["alice"].role_id == admin.id
        assert rows["alice"].notes == "keep me"
        # bob inserted correctly
        assert rows["bob"].notes == "new user"

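Aside: the SKIP_EXISTING contract (insert only rows whose primary key is absent, never touch the rest) fits in a few lines. A sketch for the single-column PK these tests use; `skip_existing_sketch` is hypothetical and reuses this test module's User model:

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession


async def skip_existing_sketch(session: AsyncSession, rows: list[dict]) -> list[dict]:
    # Fetch the PKs that already exist, then insert only the new rows.
    ids = [row["id"] for row in rows]
    existing = {
        r[0] for r in await session.execute(select(User.id).where(User.id.in_(ids)))
    }
    new_rows = [row for row in rows if row["id"] not in existing]
    session.add_all(User(**row) for row in new_rows)
    await session.flush()
    return new_rows
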
    @pytest.mark.anyio
    async def test_insert_batch_every_row_different_nullable_columns(
        self, db_session: AsyncSession
    ):
        """Each row in the batch sets a different combination of nullable columns.

        Tests that normalization produces valid SQL for all rows.
        """
        registry = FixtureRegistry()
        admin = await RoleCrud.create(db_session, RoleCreate(name="admin"))
        uid1 = uuid.uuid4()
        uid2 = uuid.uuid4()
        uid3 = uuid.uuid4()

        @registry.register
        def users():
            return [
                User(
                    id=uid1,
                    username="all_set",
                    email="a@test.com",
                    role_id=admin.id,
                    notes="full",
                ),
                User(
                    id=uid2, username="only_role", email="b@test.com", role_id=admin.id
                ),
                User(
                    id=uid3, username="only_notes", email="c@test.com", notes="partial"
                ),
            ]

        await load_fixtures(db_session, registry, "users", strategy=LoadStrategy.INSERT)

        from sqlalchemy import select

        rows = {
            r.username: r
            for r in (await db_session.execute(select(User))).scalars().all()
        }
        assert rows["all_set"].role_id == admin.id
        assert rows["all_set"].notes == "full"
        assert rows["only_role"].role_id == admin.id
        assert rows["only_role"].notes is None
        assert rows["only_notes"].role_id is None
        assert rows["only_notes"].notes == "partial"
1179
tests/test_models.py
File diff suppressed because it is too large
@@ -7,9 +7,10 @@ from fastapi import Depends, FastAPI
from httpx import AsyncClient
from sqlalchemy import select, text
from sqlalchemy.engine import make_url
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.orm import selectinload

from fastapi_toolsets.db import get_transaction
from fastapi_toolsets.fixtures import Context, FixtureRegistry
from fastapi_toolsets.pytest import (
    create_async_client,
@@ -336,6 +337,55 @@ class TestCreateDbSession:
        result = await session.execute(select(Role))
        assert result.all() == []

    @pytest.mark.anyio
    async def test_get_transaction_commits_visible_to_separate_session(self):
        """Data written via get_transaction() is committed and visible to other sessions."""
        role_id = uuid.uuid4()

        async with create_db_session(DATABASE_URL, Base, drop_tables=False) as session:
            # Simulate what _create_fixture_function does: insert via get_transaction
            # with no explicit commit afterward.
            async with get_transaction(session):
                role = Role(id=role_id, name="visible_to_other_session")
                session.add(role)

        # The data must have been committed (begin/commit, not a savepoint),
        # so a separate engine/session can read it.
        other_engine = create_async_engine(DATABASE_URL, echo=False)
        try:
            other_session_maker = async_sessionmaker(
                other_engine, expire_on_commit=False
            )
            async with other_session_maker() as other:
                result = await other.execute(select(Role).where(Role.id == role_id))
                fetched = result.scalar_one_or_none()
                assert fetched is not None, (
                    "Fixture data inserted via get_transaction() must be committed "
                    "and visible to a separate session. If create_db_session uses "
                    "create_db_context, auto-begin forces get_transaction() into "
                    "savepoints instead of real commits."
                )
                assert fetched.name == "visible_to_other_session"
        finally:
            await other_engine.dispose()

        # Cleanup
        async with create_db_session(DATABASE_URL, Base, drop_tables=True) as _:
            pass

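Aside: the long assertion message states the contract; the begin-versus-savepoint distinction it guards against looks roughly like this (a sketch under standard AsyncSession semantics, not the library's actual get_transaction implementation):

from contextlib import asynccontextmanager

from sqlalchemy.ext.asyncio import AsyncSession


@asynccontextmanager
async def get_transaction_sketch(session: AsyncSession):
    # Inside an already-begun transaction (e.g. a harness with auto-begin),
    # only a SAVEPOINT is available and nothing is durably committed here;
    # otherwise a real BEGIN/COMMIT makes the writes visible to other
    # sessions, which is what the test above requires.
    if session.in_transaction():
        async with session.begin_nested():
            yield session
    else:
        async with session.begin():
            yield session
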
class TestDeprecatedCleanupTables:
    """Tests for the deprecated cleanup_tables re-export in fastapi_toolsets.pytest."""

    @pytest.mark.anyio
    async def test_emits_deprecation_warning(self):
        """cleanup_tables imported from fastapi_toolsets.pytest emits DeprecationWarning."""
        from fastapi_toolsets.pytest.utils import cleanup_tables

        async with create_db_session(DATABASE_URL, Base, drop_tables=True) as session:
            with pytest.warns(DeprecationWarning, match="fastapi_toolsets.db"):
                await cleanup_tables(session, Base)


class TestGetXdistWorker:
    """Tests for _get_xdist_worker helper."""

1180
tests/test_security.py
Normal file
File diff suppressed because it is too large
153
uv.lock
generated
@@ -81,6 +81,76 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/3c/d7/8fb3044eaef08a310acfe23dae9a8e2e07d305edc29a53497e52bc76eca7/asyncpg-0.31.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bd4107bb7cdd0e9e65fae66a62afd3a249663b844fa34d479f6d5b3bef9c04c3", size = 706062, upload-time = "2025-11-24T23:26:44.086Z" },
]

[[package]]
name = "bcrypt"
version = "5.0.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d4/36/3329e2518d70ad8e2e5817d5a4cac6bba05a47767ec416c7d020a965f408/bcrypt-5.0.0.tar.gz", hash = "sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd", size = 25386, upload-time = "2025-09-25T19:50:47.829Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/13/85/3e65e01985fddf25b64ca67275bb5bdb4040bd1a53b66d355c6c37c8a680/bcrypt-5.0.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f3c08197f3039bec79cee59a606d62b96b16669cff3949f21e74796b6e3cd2be", size = 481806, upload-time = "2025-09-25T19:49:05.102Z" },
    { url = "https://files.pythonhosted.org/packages/44/dc/01eb79f12b177017a726cbf78330eb0eb442fae0e7b3dfd84ea2849552f3/bcrypt-5.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:200af71bc25f22006f4069060c88ed36f8aa4ff7f53e67ff04d2ab3f1e79a5b2", size = 268626, upload-time = "2025-09-25T19:49:06.723Z" },
    { url = "https://files.pythonhosted.org/packages/8c/cf/e82388ad5959c40d6afd94fb4743cc077129d45b952d46bdc3180310e2df/bcrypt-5.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:baade0a5657654c2984468efb7d6c110db87ea63ef5a4b54732e7e337253e44f", size = 271853, upload-time = "2025-09-25T19:49:08.028Z" },
    { url = "https://files.pythonhosted.org/packages/ec/86/7134b9dae7cf0efa85671651341f6afa695857fae172615e960fb6a466fa/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c58b56cdfb03202b3bcc9fd8daee8e8e9b6d7e3163aa97c631dfcfcc24d36c86", size = 269793, upload-time = "2025-09-25T19:49:09.727Z" },
    { url = "https://files.pythonhosted.org/packages/cc/82/6296688ac1b9e503d034e7d0614d56e80c5d1a08402ff856a4549cb59207/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4bfd2a34de661f34d0bda43c3e4e79df586e4716ef401fe31ea39d69d581ef23", size = 289930, upload-time = "2025-09-25T19:49:11.204Z" },
    { url = "https://files.pythonhosted.org/packages/d1/18/884a44aa47f2a3b88dd09bc05a1e40b57878ecd111d17e5bba6f09f8bb77/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ed2e1365e31fc73f1825fa830f1c8f8917ca1b3ca6185773b349c20fd606cec2", size = 272194, upload-time = "2025-09-25T19:49:12.524Z" },
    { url = "https://files.pythonhosted.org/packages/0e/8f/371a3ab33c6982070b674f1788e05b656cfbf5685894acbfef0c65483a59/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:83e787d7a84dbbfba6f250dd7a5efd689e935f03dd83b0f919d39349e1f23f83", size = 269381, upload-time = "2025-09-25T19:49:14.308Z" },
    { url = "https://files.pythonhosted.org/packages/b1/34/7e4e6abb7a8778db6422e88b1f06eb07c47682313997ee8a8f9352e5a6f1/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:137c5156524328a24b9fac1cb5db0ba618bc97d11970b39184c1d87dc4bf1746", size = 271750, upload-time = "2025-09-25T19:49:15.584Z" },
    { url = "https://files.pythonhosted.org/packages/c0/1b/54f416be2499bd72123c70d98d36c6cd61a4e33d9b89562c22481c81bb30/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:38cac74101777a6a7d3b3e3cfefa57089b5ada650dce2baf0cbdd9d65db22a9e", size = 303757, upload-time = "2025-09-25T19:49:17.244Z" },
    { url = "https://files.pythonhosted.org/packages/13/62/062c24c7bcf9d2826a1a843d0d605c65a755bc98002923d01fd61270705a/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:d8d65b564ec849643d9f7ea05c6d9f0cd7ca23bdd4ac0c2dbef1104ab504543d", size = 306740, upload-time = "2025-09-25T19:49:18.693Z" },
    { url = "https://files.pythonhosted.org/packages/d5/c8/1fdbfc8c0f20875b6b4020f3c7dc447b8de60aa0be5faaf009d24242aec9/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:741449132f64b3524e95cd30e5cd3343006ce146088f074f31ab26b94e6c75ba", size = 334197, upload-time = "2025-09-25T19:49:20.523Z" },
    { url = "https://files.pythonhosted.org/packages/a6/c1/8b84545382d75bef226fbc6588af0f7b7d095f7cd6a670b42a86243183cd/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:212139484ab3207b1f0c00633d3be92fef3c5f0af17cad155679d03ff2ee1e41", size = 352974, upload-time = "2025-09-25T19:49:22.254Z" },
    { url = "https://files.pythonhosted.org/packages/10/a6/ffb49d4254ed085e62e3e5dd05982b4393e32fe1e49bb1130186617c29cd/bcrypt-5.0.0-cp313-cp313t-win32.whl", hash = "sha256:9d52ed507c2488eddd6a95bccee4e808d3234fa78dd370e24bac65a21212b861", size = 148498, upload-time = "2025-09-25T19:49:24.134Z" },
    { url = "https://files.pythonhosted.org/packages/48/a9/259559edc85258b6d5fc5471a62a3299a6aa37a6611a169756bf4689323c/bcrypt-5.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f6984a24db30548fd39a44360532898c33528b74aedf81c26cf29c51ee47057e", size = 145853, upload-time = "2025-09-25T19:49:25.702Z" },
    { url = "https://files.pythonhosted.org/packages/2d/df/9714173403c7e8b245acf8e4be8876aac64a209d1b392af457c79e60492e/bcrypt-5.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9fffdb387abe6aa775af36ef16f55e318dcda4194ddbf82007a6f21da29de8f5", size = 139626, upload-time = "2025-09-25T19:49:26.928Z" },
    { url = "https://files.pythonhosted.org/packages/f8/14/c18006f91816606a4abe294ccc5d1e6f0e42304df5a33710e9e8e95416e1/bcrypt-5.0.0-cp314-cp314t-macosx_10_12_universal2.whl", hash = "sha256:4870a52610537037adb382444fefd3706d96d663ac44cbb2f37e3919dca3d7ef", size = 481862, upload-time = "2025-09-25T19:49:28.365Z" },
    { url = "https://files.pythonhosted.org/packages/67/49/dd074d831f00e589537e07a0725cf0e220d1f0d5d8e85ad5bbff251c45aa/bcrypt-5.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48f753100931605686f74e27a7b49238122aa761a9aefe9373265b8b7aa43ea4", size = 268544, upload-time = "2025-09-25T19:49:30.39Z" },
    { url = "https://files.pythonhosted.org/packages/f5/91/50ccba088b8c474545b034a1424d05195d9fcbaaf802ab8bfe2be5a4e0d7/bcrypt-5.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f70aadb7a809305226daedf75d90379c397b094755a710d7014b8b117df1ebbf", size = 271787, upload-time = "2025-09-25T19:49:32.144Z" },
    { url = "https://files.pythonhosted.org/packages/aa/e7/d7dba133e02abcda3b52087a7eea8c0d4f64d3e593b4fffc10c31b7061f3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:744d3c6b164caa658adcb72cb8cc9ad9b4b75c7db507ab4bc2480474a51989da", size = 269753, upload-time = "2025-09-25T19:49:33.885Z" },
    { url = "https://files.pythonhosted.org/packages/33/fc/5b145673c4b8d01018307b5c2c1fc87a6f5a436f0ad56607aee389de8ee3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a28bc05039bdf3289d757f49d616ab3efe8cf40d8e8001ccdd621cd4f98f4fc9", size = 289587, upload-time = "2025-09-25T19:49:35.144Z" },
    { url = "https://files.pythonhosted.org/packages/27/d7/1ff22703ec6d4f90e62f1a5654b8867ef96bafb8e8102c2288333e1a6ca6/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:7f277a4b3390ab4bebe597800a90da0edae882c6196d3038a73adf446c4f969f", size = 272178, upload-time = "2025-09-25T19:49:36.793Z" },
    { url = "https://files.pythonhosted.org/packages/c8/88/815b6d558a1e4d40ece04a2f84865b0fef233513bd85fd0e40c294272d62/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:79cfa161eda8d2ddf29acad370356b47f02387153b11d46042e93a0a95127493", size = 269295, upload-time = "2025-09-25T19:49:38.164Z" },
    { url = "https://files.pythonhosted.org/packages/51/8c/e0db387c79ab4931fc89827d37608c31cc57b6edc08ccd2386139028dc0d/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a5393eae5722bcef046a990b84dff02b954904c36a194f6cfc817d7dca6c6f0b", size = 271700, upload-time = "2025-09-25T19:49:39.917Z" },
    { url = "https://files.pythonhosted.org/packages/06/83/1570edddd150f572dbe9fc00f6203a89fc7d4226821f67328a85c330f239/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7f4c94dec1b5ab5d522750cb059bb9409ea8872d4494fd152b53cca99f1ddd8c", size = 334034, upload-time = "2025-09-25T19:49:41.227Z" },
    { url = "https://files.pythonhosted.org/packages/c9/f2/ea64e51a65e56ae7a8a4ec236c2bfbdd4b23008abd50ac33fbb2d1d15424/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0cae4cb350934dfd74c020525eeae0a5f79257e8a201c0c176f4b84fdbf2a4b4", size = 352766, upload-time = "2025-09-25T19:49:43.08Z" },
    { url = "https://files.pythonhosted.org/packages/d7/d4/1a388d21ee66876f27d1a1f41287897d0c0f1712ef97d395d708ba93004c/bcrypt-5.0.0-cp314-cp314t-win32.whl", hash = "sha256:b17366316c654e1ad0306a6858e189fc835eca39f7eb2cafd6aaca8ce0c40a2e", size = 152449, upload-time = "2025-09-25T19:49:44.971Z" },
    { url = "https://files.pythonhosted.org/packages/3f/61/3291c2243ae0229e5bca5d19f4032cecad5dfb05a2557169d3a69dc0ba91/bcrypt-5.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:92864f54fb48b4c718fc92a32825d0e42265a627f956bc0361fe869f1adc3e7d", size = 149310, upload-time = "2025-09-25T19:49:46.162Z" },
    { url = "https://files.pythonhosted.org/packages/3e/89/4b01c52ae0c1a681d4021e5dd3e45b111a8fb47254a274fa9a378d8d834b/bcrypt-5.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dd19cf5184a90c873009244586396a6a884d591a5323f0e8a5922560718d4993", size = 143761, upload-time = "2025-09-25T19:49:47.345Z" },
    { url = "https://files.pythonhosted.org/packages/84/29/6237f151fbfe295fe3e074ecc6d44228faa1e842a81f6d34a02937ee1736/bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b", size = 494553, upload-time = "2025-09-25T19:49:49.006Z" },
    { url = "https://files.pythonhosted.org/packages/45/b6/4c1205dde5e464ea3bd88e8742e19f899c16fa8916fb8510a851fae985b5/bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb", size = 275009, upload-time = "2025-09-25T19:49:50.581Z" },
    { url = "https://files.pythonhosted.org/packages/3b/71/427945e6ead72ccffe77894b2655b695ccf14ae1866cd977e185d606dd2f/bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef", size = 278029, upload-time = "2025-09-25T19:49:52.533Z" },
    { url = "https://files.pythonhosted.org/packages/17/72/c344825e3b83c5389a369c8a8e58ffe1480b8a699f46c127c34580c4666b/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd", size = 275907, upload-time = "2025-09-25T19:49:54.709Z" },
    { url = "https://files.pythonhosted.org/packages/0b/7e/d4e47d2df1641a36d1212e5c0514f5291e1a956a7749f1e595c07a972038/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd", size = 296500, upload-time = "2025-09-25T19:49:56.013Z" },
    { url = "https://files.pythonhosted.org/packages/0f/c3/0ae57a68be2039287ec28bc463b82e4b8dc23f9d12c0be331f4782e19108/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464", size = 278412, upload-time = "2025-09-25T19:49:57.356Z" },
    { url = "https://files.pythonhosted.org/packages/45/2b/77424511adb11e6a99e3a00dcc7745034bee89036ad7d7e255a7e47be7d8/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75", size = 275486, upload-time = "2025-09-25T19:49:59.116Z" },
    { url = "https://files.pythonhosted.org/packages/43/0a/405c753f6158e0f3f14b00b462d8bca31296f7ecfc8fc8bc7919c0c7d73a/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff", size = 277940, upload-time = "2025-09-25T19:50:00.869Z" },
    { url = "https://files.pythonhosted.org/packages/62/83/b3efc285d4aadc1fa83db385ec64dcfa1707e890eb42f03b127d66ac1b7b/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4", size = 310776, upload-time = "2025-09-25T19:50:02.393Z" },
    { url = "https://files.pythonhosted.org/packages/95/7d/47ee337dacecde6d234890fe929936cb03ebc4c3a7460854bbd9c97780b8/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb", size = 312922, upload-time = "2025-09-25T19:50:04.232Z" },
    { url = "https://files.pythonhosted.org/packages/d6/3a/43d494dfb728f55f4e1cf8fd435d50c16a2d75493225b54c8d06122523c6/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c", size = 341367, upload-time = "2025-09-25T19:50:05.559Z" },
    { url = "https://files.pythonhosted.org/packages/55/ab/a0727a4547e383e2e22a630e0f908113db37904f58719dc48d4622139b5c/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb", size = 359187, upload-time = "2025-09-25T19:50:06.916Z" },
    { url = "https://files.pythonhosted.org/packages/1b/bb/461f352fdca663524b4643d8b09e8435b4990f17fbf4fea6bc2a90aa0cc7/bcrypt-5.0.0-cp38-abi3-win32.whl", hash = "sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538", size = 153752, upload-time = "2025-09-25T19:50:08.515Z" },
    { url = "https://files.pythonhosted.org/packages/41/aa/4190e60921927b7056820291f56fc57d00d04757c8b316b2d3c0d1d6da2c/bcrypt-5.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9", size = 150881, upload-time = "2025-09-25T19:50:09.742Z" },
    { url = "https://files.pythonhosted.org/packages/54/12/cd77221719d0b39ac0b55dbd39358db1cd1246e0282e104366ebbfb8266a/bcrypt-5.0.0-cp38-abi3-win_arm64.whl", hash = "sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980", size = 144931, upload-time = "2025-09-25T19:50:11.016Z" },
    { url = "https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a", size = 495313, upload-time = "2025-09-25T19:50:12.309Z" },
    { url = "https://files.pythonhosted.org/packages/ac/ee/2f4985dbad090ace5ad1f7dd8ff94477fe089b5fab2040bd784a3d5f187b/bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191", size = 275290, upload-time = "2025-09-25T19:50:13.673Z" },
    { url = "https://files.pythonhosted.org/packages/e4/6e/b77ade812672d15cf50842e167eead80ac3514f3beacac8902915417f8b7/bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254", size = 278253, upload-time = "2025-09-25T19:50:15.089Z" },
    { url = "https://files.pythonhosted.org/packages/36/c4/ed00ed32f1040f7990dac7115f82273e3c03da1e1a1587a778d8cea496d8/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db", size = 276084, upload-time = "2025-09-25T19:50:16.699Z" },
    { url = "https://files.pythonhosted.org/packages/e7/c4/fa6e16145e145e87f1fa351bbd54b429354fd72145cd3d4e0c5157cf4c70/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac", size = 297185, upload-time = "2025-09-25T19:50:18.525Z" },
    { url = "https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822", size = 278656, upload-time = "2025-09-25T19:50:19.809Z" },
    { url = "https://files.pythonhosted.org/packages/ac/31/79f11865f8078e192847d2cb526e3fa27c200933c982c5b2869720fa5fce/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8", size = 275662, upload-time = "2025-09-25T19:50:21.567Z" },
    { url = "https://files.pythonhosted.org/packages/d4/8d/5e43d9584b3b3591a6f9b68f755a4da879a59712981ef5ad2a0ac1379f7a/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a", size = 278240, upload-time = "2025-09-25T19:50:23.305Z" },
    { url = "https://files.pythonhosted.org/packages/89/48/44590e3fc158620f680a978aafe8f87a4c4320da81ed11552f0323aa9a57/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1", size = 311152, upload-time = "2025-09-25T19:50:24.597Z" },
    { url = "https://files.pythonhosted.org/packages/5f/85/e4fbfc46f14f47b0d20493669a625da5827d07e8a88ee460af6cd9768b44/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42", size = 313284, upload-time = "2025-09-25T19:50:26.268Z" },
    { url = "https://files.pythonhosted.org/packages/25/ae/479f81d3f4594456a01ea2f05b132a519eff9ab5768a70430fa1132384b1/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10", size = 341643, upload-time = "2025-09-25T19:50:28.02Z" },
    { url = "https://files.pythonhosted.org/packages/df/d2/36a086dee1473b14276cd6ea7f61aef3b2648710b5d7f1c9e032c29b859f/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172", size = 359698, upload-time = "2025-09-25T19:50:31.347Z" },
    { url = "https://files.pythonhosted.org/packages/c0/f6/688d2cd64bfd0b14d805ddb8a565e11ca1fb0fd6817175d58b10052b6d88/bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683", size = 153725, upload-time = "2025-09-25T19:50:34.384Z" },
    { url = "https://files.pythonhosted.org/packages/9f/b9/9d9a641194a730bda138b3dfe53f584d61c58cd5230e37566e83ec2ffa0d/bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2", size = 150912, upload-time = "2025-09-25T19:50:35.69Z" },
    { url = "https://files.pythonhosted.org/packages/27/44/d2ef5e87509158ad2187f4dd0852df80695bb1ee0cfe0a684727b01a69e0/bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = "sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927", size = 144953, upload-time = "2025-09-25T19:50:37.32Z" },
    { url = "https://files.pythonhosted.org/packages/8a/75/4aa9f5a4d40d762892066ba1046000b329c7cd58e888a6db878019b282dc/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7edda91d5ab52b15636d9c30da87d2cc84f426c72b9dba7a9b4fe142ba11f534", size = 271180, upload-time = "2025-09-25T19:50:38.575Z" },
    { url = "https://files.pythonhosted.org/packages/54/79/875f9558179573d40a9cc743038ac2bf67dfb79cecb1e8b5d70e88c94c3d/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:046ad6db88edb3c5ece4369af997938fb1c19d6a699b9c1b27b0db432faae4c4", size = 273791, upload-time = "2025-09-25T19:50:39.913Z" },
    { url = "https://files.pythonhosted.org/packages/bc/fe/975adb8c216174bf70fc17535f75e85ac06ed5252ea077be10d9cff5ce24/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dcd58e2b3a908b5ecc9b9df2f0085592506ac2d5110786018ee5e160f28e0911", size = 270746, upload-time = "2025-09-25T19:50:43.306Z" },
    { url = "https://files.pythonhosted.org/packages/e4/f8/972c96f5a2b6c4b3deca57009d93e946bbdbe2241dca9806d502f29dd3ee/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:6b8f520b61e8781efee73cba14e3e8c9556ccfb375623f4f97429544734545b4", size = 273375, upload-time = "2025-09-25T19:50:45.43Z" },
]

[[package]]
name = "certifi"
version = "2026.1.4"
@@ -282,9 +352,11 @@ pytest = [

[package.dev-dependencies]
dev = [
    { name = "bcrypt" },
    { name = "coverage" },
    { name = "fastapi-toolsets", extra = ["all"] },
    { name = "httpx" },
    { name = "mike" },
    { name = "mkdocstrings-python" },
    { name = "prek" },
    { name = "pytest" },
@@ -296,9 +368,13 @@ dev = [
    { name = "zensical" },
]
docs = [
    { name = "mike" },
    { name = "mkdocstrings-python" },
    { name = "zensical" },
]
docs-src = [
    { name = "bcrypt" },
]
tests = [
    { name = "coverage" },
    { name = "httpx" },
@@ -325,9 +401,11 @@ provides-extras = ["cli", "metrics", "pytest", "all"]

[package.metadata.requires-dev]
dev = [
    { name = "bcrypt", specifier = ">=4.0.0" },
    { name = "coverage", specifier = ">=7.0.0" },
    { name = "fastapi-toolsets", extras = ["all"] },
    { name = "httpx", specifier = ">=0.25.0" },
    { name = "mike", git = "https://github.com/squidfunk/mike.git?tag=2.2.0%2Bzensical-0.1.0" },
    { name = "mkdocstrings-python", specifier = ">=2.0.2" },
    { name = "prek", specifier = ">=0.3.8" },
    { name = "pytest", specifier = ">=8.0.0" },
@@ -336,12 +414,14 @@ dev = [
    { name = "pytest-xdist", specifier = ">=3.0.0" },
    { name = "ruff", specifier = ">=0.1.0" },
    { name = "ty", specifier = ">=0.0.1a0" },
    { name = "zensical", specifier = ">=0.0.23" },
    { name = "zensical", specifier = ">=0.0.30" },
]
docs = [
    { name = "mike", git = "https://github.com/squidfunk/mike.git?tag=2.2.0%2Bzensical-0.1.0" },
    { name = "mkdocstrings-python", specifier = ">=2.0.2" },
    { name = "zensical", specifier = ">=0.0.23" },
    { name = "zensical", specifier = ">=0.0.30" },
]
docs-src = [{ name = "bcrypt", specifier = ">=4.0.0" }]
tests = [
    { name = "coverage", specifier = ">=7.0.0" },
    { name = "httpx", specifier = ">=0.25.0" },
@@ -604,6 +684,17 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/2c/19/04f9b178c2d8a15b076c8b5140708fa6ffc5601fb6f1e975537072df5b2a/mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307", size = 6354, upload-time = "2021-02-05T18:55:29.583Z" },
]

[[package]]
name = "mike"
version = "2.2.0+zensical.0.1.0"
source = { git = "https://github.com/squidfunk/mike.git?tag=2.2.0%2Bzensical-0.1.0#0f62791256ebeba60d20d2f1d8fe6ec3b7d1e2b3" }
dependencies = [
    { name = "jinja2" },
    { name = "pyparsing" },
    { name = "verspec" },
    { name = "zensical" },
]

[[package]]
name = "mkdocs"
version = "1.6.1"
@@ -870,24 +961,33 @@ wheels = [

[[package]]
name = "pygments"
version = "2.19.2"
version = "2.20.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
    { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" },
]

[[package]]
name = "pymdown-extensions"
version = "10.21"
version = "10.21.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "markdown" },
    { name = "pyyaml" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ba/63/06673d1eb6d8f83c0ea1f677d770e12565fb516928b4109c9e2055656a9e/pymdown_extensions-10.21.tar.gz", hash = "sha256:39f4a020f40773f6b2ff31d2cd2546c2c04d0a6498c31d9c688d2be07e1767d5", size = 853363, upload-time = "2026-02-15T20:44:06.748Z" }
sdist = { url = "https://files.pythonhosted.org/packages/df/08/f1c908c581fd11913da4711ea7ba32c0eee40b0190000996bb863b0c9349/pymdown_extensions-10.21.2.tar.gz", hash = "sha256:c3f55a5b8a1d0edf6699e35dcbea71d978d34ff3fa79f3d807b8a5b3fa90fbdc", size = 853922, upload-time = "2026-03-29T15:01:55.233Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/6f/2c/5b079febdc65e1c3fb2729bf958d18b45be7113828528e8a0b5850dd819a/pymdown_extensions-10.21-py3-none-any.whl", hash = "sha256:91b879f9f864d49794c2d9534372b10150e6141096c3908a455e45ca72ad9d3f", size = 268877, upload-time = "2026-02-15T20:44:05.464Z" },
    { url = "https://files.pythonhosted.org/packages/f7/27/a2fc51a4a122dfd1015e921ae9d22fee3d20b0b8080d9a704578bf9deece/pymdown_extensions-10.21.2-py3-none-any.whl", hash = "sha256:5c0fd2a2bea14eb39af8ff284f1066d898ab2187d81b889b75d46d4348c01638", size = 268901, upload-time = "2026-03-29T15:01:53.244Z" },
]

[[package]]
name = "pyparsing"
version = "3.3.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f3/91/9c6ee907786a473bf81c5f53cf703ba0957b23ab84c264080fb5a450416f/pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc", size = 6851574, upload-time = "2026-01-21T03:57:59.36Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" },
]

[[package]]
@@ -1262,6 +1362,15 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
]

[[package]]
name = "verspec"
version = "0.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e7/44/8126f9f0c44319b2efc65feaad589cadef4d77ece200ae3c9133d58464d0/verspec-0.1.0.tar.gz", hash = "sha256:c4504ca697b2056cdb4bfa7121461f5a0e81809255b41c03dda4ba823637c01e", size = 27123, upload-time = "2020-11-30T02:24:09.646Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/a4/ce/3b6fee91c85626eaf769d617f1be9d2e15c1cca027bbdeb2e0d751469355/verspec-0.1.0-py3-none-any.whl", hash = "sha256:741877d5633cc9464c45a469ae2a31e801e6dbbaa85b9675d481cda100f11c31", size = 19640, upload-time = "2020-11-30T02:24:08.387Z" },
]

[[package]]
name = "watchdog"
version = "6.0.0"
@@ -1291,7 +1400,7 @@ wheels = [

[[package]]
name = "zensical"
version = "0.0.29"
version = "0.0.30"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "click" },
@@ -1301,18 +1410,18 @@ dependencies = [
    { name = "pymdown-extensions" },
    { name = "pyyaml" },
]
sdist = { url = "https://files.pythonhosted.org/packages/78/bd/5786ab618a60bd7469ab243a7fd2c9eecb0790c85c784abb8b97edb77a54/zensical-0.0.29.tar.gz", hash = "sha256:0d6282be7cb551e12d5806badf5e94c54a5e2f2cf07057a3e36d1eaf97c33ada", size = 3842641, upload-time = "2026-03-24T13:37:27.587Z" }
sdist = { url = "https://files.pythonhosted.org/packages/1d/53/5e551f8912718816733a75adcb53a0787b2d2edca5869c156325aaf82e24/zensical-0.0.30.tar.gz", hash = "sha256:408b531683f6bcb6cc5ab928146d2c68afbc16fac4eda87ae3dd20af1498180f", size = 3844287, upload-time = "2026-03-28T17:55:52.836Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/4b/9c/8b681daa024abca9763017bec09ecee8008e110cae1254217c8dd22cc339/zensical-0.0.29-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:20ae0709ea14fce25ab33d0a82acdaf454a7a2e232a9ee20c019942205174476", size = 12311399, upload-time = "2026-03-24T13:36:53.809Z" },
    { url = "https://files.pythonhosted.org/packages/1b/e3/ac0eb77a8a7f793613813de68bde26776d0da68d8041fa9eb8d0b986a449/zensical-0.0.30-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b67fca8bfcd71c94b331045a591bf6e24fe123a66fba94587aa3379faf521a16", size = 12313786, upload-time = "2026-03-28T17:55:18.839Z" },
    { url = "https://files.pythonhosted.org/packages/81/ae/4ebb4d8bb2ef0164d473698b92f11caf431fc436e1625524acd5641102ca/zensical-0.0.29-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:599af3ba66fcd0146d7019f3493ed3c316051fae6c4d5599bc59f3a8f4b8a6f0", size = 12191845, upload-time = "2026-03-24T13:36:56.909Z" },
    { url = "https://files.pythonhosted.org/packages/a5/6a/73e461dfa27d3bc415e48396f83a3287b43df2fd3361e25146bc86360aab/zensical-0.0.30-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:8ceadfece1153edc26506e8ddf68d9818afe8517cf3bcdb6bfe4cb2793ae247b", size = 12186136, upload-time = "2026-03-28T17:55:21.836Z" },
    { url = "https://files.pythonhosted.org/packages/d5/35/67f89db06571a52283b3ecbe3bcf32fd3115ca50436b3ae177a948b83ea7/zensical-0.0.29-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eea7e48a00a71c0586e875079b5f83a070c33a147e52ad4383e4b63ab524332b", size = 12554105, upload-time = "2026-03-24T13:36:59.945Z" },
    { url = "https://files.pythonhosted.org/packages/a3/bc/9022156b4c28c1b95209acb64319b1e5cd0af2e97035bdd461e58408cb46/zensical-0.0.30-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e100b2b654337ac5306ba12818f3c5336c66d0d34c593ef05e316c124a5819cb", size = 12556115, upload-time = "2026-03-28T17:55:24.849Z" },
    { url = "https://files.pythonhosted.org/packages/7c/f6/ac79e5d9c18b28557c9ff1c7c23d695fbdd82645d69bfe02292f46d935e7/zensical-0.0.29-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:59a57db35542e98d2896b833de07d199320f8ada3b4e7ddccb7fe892292d8b74", size = 12498643, upload-time = "2026-03-24T13:37:02.376Z" },
    { url = "https://files.pythonhosted.org/packages/0b/29/9e8f5bd6d33b35f4c368ae8b13d431dc42b2de17ea6eccbd71d48122eba6/zensical-0.0.30-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bdf641ffddaf21c6971b91a4426b81cd76271c5b1adb7176afcce3f1508328b1", size = 12498121, upload-time = "2026-03-28T17:55:27.637Z" },
    { url = "https://files.pythonhosted.org/packages/b1/70/5c22a96a69e0e91e569c26236918bb9bab1170f59b29ad04105ead64f199/zensical-0.0.29-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d42c2b2a96a80cf64c98ba7242f59ef95109914bd4c9499d7ebc12544663852c", size = 12854531, upload-time = "2026-03-24T13:37:04.962Z" },
    { url = "https://files.pythonhosted.org/packages/c4/e1/b8dfa0769050e62cd731358145fdeb67af35e322197bd7e7727250596e7b/zensical-0.0.30-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fd909a0c2116e26190c7f3ec4fb55837c417b7a8d99ebf4f3deb26b07b97e49", size = 12854142, upload-time = "2026-03-28T17:55:30.54Z" },
    { url = "https://files.pythonhosted.org/packages/79/25/e32237a8fcb0ceae1ef8e192e7f8db53b38f1e48f1c7cdbacd0a7b713892/zensical-0.0.29-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b2fca39c5f6b1782c77cf6591cf346357cabee85ebdb956c5ddc0fd5169f3d9", size = 12596828, upload-time = "2026-03-24T13:37:07.817Z" },
    { url = "https://files.pythonhosted.org/packages/04/11/62a36cfb81522b6108db8f9e96d36da8cccb306b02c15ad19e1b333fa7c8/zensical-0.0.30-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16fd2da09fe4e5cbec2ca74f31abc70f32f7330d56593b647e0a114bb329171a", size = 12598341, upload-time = "2026-03-28T17:55:32.988Z" },
    { url = "https://files.pythonhosted.org/packages/ff/74/89ac909cbb258903ea53802c184e4986c17ce0ba79b1c7f77b7e78a2dce3/zensical-0.0.29-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dfc23a74ef672aa51088c080286319da1dc0b989cd5051e9e5e6d7d4abbc2fc1", size = 12732059, upload-time = "2026-03-24T13:37:11.651Z" },
    { url = "https://files.pythonhosted.org/packages/a7/a4/8c7a6725fb226aa71d19209403d974e45f39d757e725f9558c6ed8d350a5/zensical-0.0.30-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:896b36eaef7fed5f8fc6f2c8264b2751aad63c2d66d3d8650e38481b6b4f6f7b", size = 12732307, upload-time = "2026-03-28T17:55:35.618Z" },
    { url = "https://files.pythonhosted.org/packages/8c/31/2429de6a9328eed4acc7e9a3789f160294a15115be15f9870a0d02649302/zensical-0.0.29-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:c9336d4e4b232e3c9a70e30258e916dd7e60c0a2a08c8690065e60350c302028", size = 12768542, upload-time = "2026-03-24T13:37:14.39Z" },
    { url = "https://files.pythonhosted.org/packages/5e/a1/7858fb3f6ac67d7d24a8acbe834cbe26851d6bd151ece6fba3fc88b0f878/zensical-0.0.30-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:a1f515ec67a0d0250e53846327bf0c69635a1f39749da3b04feb68431188d3c6", size = 12770962, upload-time = "2026-03-28T17:55:38.627Z" },
    { url = "https://files.pythonhosted.org/packages/10/8a/55588b2a1dcbe86dad0404506c9ba367a06c663b1ff47147c84d26f7510e/zensical-0.0.29-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:30661148f0681199f3b598cbeb1d54f5cba773e54ae840bac639250d85907b84", size = 12917991, upload-time = "2026-03-24T13:37:16.795Z" },
    { url = "https://files.pythonhosted.org/packages/49/b7/228298112a69d0b74e6e93041bffcf1fc96d03cf252be94a354f277d4789/zensical-0.0.30-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:ce33d1002438838a35fa43358a1f43d74f874586596d3d116999d3756cded00e", size = 12919256, upload-time = "2026-03-28T17:55:41.413Z" },
|
||||||
{ url = "https://files.pythonhosted.org/packages/ec/5d/653901f0d3a3ca72daebc62746a148797f4e422cc3a2b66a4e6718e4398f/zensical-0.0.29-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6a566ac1fd4bfac5d711a7bd1ae06666712127c2718daa5083c7bf3f107e8578", size = 12868392, upload-time = "2026-03-24T13:37:19.42Z" },
|
{ url = "https://files.pythonhosted.org/packages/de/c7/5b4ea036f7f7d84abf907f7f7a3e8420b054c89279c5273ca248d3bc9f48/zensical-0.0.30-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:029dad561568f4ae3056dde16a81012efd92c426d4eb7101f960f448c1168196", size = 12869760, upload-time = "2026-03-28T17:55:44.474Z" },
|
||||||
{ url = "https://files.pythonhosted.org/packages/29/58/d7449bc88a174b98daa3f2fbdfbdac3493768a557d8987e88bdaa6c78b1a/zensical-0.0.29-cp310-abi3-win32.whl", hash = "sha256:a231a3a02a3851741dc4d2de8910b5c39fe81e55bf026d8edf4d803e91a922fb", size = 11905486, upload-time = "2026-03-24T13:37:22.154Z" },
|
{ url = "https://files.pythonhosted.org/packages/36/b4/77bef2132e43108db718ae014a5961fc511e88fc446c11f1c3483def429e/zensical-0.0.30-cp310-abi3-win32.whl", hash = "sha256:0105672850f053c326fba9fdd95adf60e9f90308f8cc1c08e3a00e15a8d5e90f", size = 11905658, upload-time = "2026-03-28T17:55:47.416Z" },
|
||||||
{ url = "https://files.pythonhosted.org/packages/f5/09/3fd082d016497c4d26ff20f42a8be2cc91e27191c0c5f3cd6507827f666f/zensical-0.0.29-cp310-abi3-win_amd64.whl", hash = "sha256:7145c5504380a344b8cd4586da815cdde77ef4a42319fa4f35e78250f01985af", size = 12101510, upload-time = "2026-03-24T13:37:24.77Z" },
|
{ url = "https://files.pythonhosted.org/packages/a1/59/23b6c7ff062e2b299cc60e333095e853f9d38d1b5abe743c7b94c4ac432c/zensical-0.0.30-cp310-abi3-win_amd64.whl", hash = "sha256:b879dbf4c69d3ea41694bae33e1b948847e635dcbcd6ec8c522920833379dd48", size = 12101867, upload-time = "2026-03-28T17:55:50.083Z" },
|
||||||
]
|
]
|
||||||
|
|||||||
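The only substantive change in this lockfile hunk is the zensical pin moving from 0.0.29 to 0.0.30; the sdist and wheel entries follow mechanically. A refresh like this is normally regenerated rather than hand-edited; with uv it would look roughly like the following (a sketch only — the exact invocation used for this commit is not recorded in the diff):

    uv lock --upgrade-package zensical
    uv sync --group dev
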
@@ -140,6 +140,7 @@ Examples = [
 
 [[project.nav]]
 Migration = [
+    {"v3.0" = "migration/v3.md"},
     {"v2.0" = "migration/v2.md"},
 ]
 
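For context, after this hunk the versioned navigation block reads as follows (reconstructed from the hunk itself; surrounding keys are assumed unchanged):

    [[project.nav]]
    Migration = [
        {"v3.0" = "migration/v3.md"},
        {"v2.0" = "migration/v2.md"},
    ]

The new v3.0 entry is inserted above v2.0, keeping the most recent migration guide first in the nav.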