Mirror of https://github.com/d3vyce/fastapi-toolsets.git (synced 2026-04-16 14:46:24 +02:00)
Compare commits 8a16f2808e ... v3.0.3 (57 commits)

0ed93d62c8, 2a49814818, f8e090c7c3, 54decaf3e1, 6b127d9645, 8bed96f4bf, 74d15e13bc, e38d8d2d4f,
9b74f162ab, ab125c6ea1, e388e26858, 04da241294, bbe63edc46, 0b17c77dee, bce71bfd42, 2f1eb4d468,
1f06eab11d, fac9aa6f60, f310466697, 32059dcb02, f027981e80, 5c1487c24a, ebaa61525f, 4829cfba73,
9ca2da4213, 0b3f097012, 1890d696bf, 104285c6e5, f5afbbe37f, f4698bea8a, 5215b921ae, 9dad59e25d,
29326ab532, 04afef7e33, 666c621fda, 460b760fa4, 65d0b0e0b1, 2d49cd32db, a5dd756d87, 781cfb66c9,
91b84f8146, 396e381ac3, b4eb4c1ca9, c90717754f, 337985ef38, b5e6dfe6fe, 6681b7ade7, 6981c33dc8,
0c7a99039c, bcb5b0bfda, 100e1c1aa9, db6c7a565f, 768e405554, f0223ebde4, f8c9bf69fe, 6d6fae5538,
fc9cd1f034
.github/workflows/ci.yml (vendored, 7 changes)

@@ -6,6 +6,9 @@ on:
   pull_request:
     branches: [main]
 
+permissions:
+  contents: read
+
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

@@ -93,7 +96,7 @@ jobs:
       - name: Upload coverage to Codecov
         if: matrix.python-version == '3.14'
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v6
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
           report_type: coverage

@@ -102,7 +105,7 @@ jobs:
       - name: Upload test results to Codecov
         if: matrix.python-version == '3.14'
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v6
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
           report_type: test_results
.github/workflows/docs.yml (vendored, 43 changes)

@@ -5,20 +5,15 @@ on:
     types: [published]
 
 permissions:
-  contents: read
-  pages: write
-  id-token: write
+  contents: write
 
 jobs:
   deploy:
-    environment:
-      name: github-pages
-      url: ${{ steps.deployment.outputs.page_url }}
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/configure-pages@v5
-
       - uses: actions/checkout@v6
+        with:
+          fetch-depth: 0
 
       - name: Install uv
         uses: astral-sh/setup-uv@v7

@@ -28,11 +23,31 @@ jobs:
 
       - run: uv sync --group dev
 
-      - run: uv run zensical build --clean
-
-      - uses: actions/upload-pages-artifact@v4
-        with:
-          path: site
-
-      - uses: actions/deploy-pages@v4
-        id: deployment
+      - name: Configure git
+        run: |
+          git config user.name "github-actions[bot]"
+          git config user.email "github-actions[bot]@users.noreply.github.com"
+
+      - name: Deploy documentation
+        run: |
+          VERSION=${GITHUB_REF_NAME#v}
+          MAJOR=$(echo "$VERSION" | cut -d. -f1)
+          DEPLOY_VERSION="v$(echo "$VERSION" | cut -d. -f1-2)"
+
+          # On new major: keep only the latest feature version of the previous major
+          PREV_MAJOR=$((MAJOR - 1))
+          OLD_FEATURE_VERSIONS=$(uv run mike list 2>/dev/null | grep -oE "^v${PREV_MAJOR}\.[0-9]+" || true)
+
+          if [ -n "$OLD_FEATURE_VERSIONS" ]; then
+            LATEST_PREV=$(echo "$OLD_FEATURE_VERSIONS" | sort -t. -k2 -n | tail -1)
+            echo "$OLD_FEATURE_VERSIONS" | while read -r OLD_V; do
+              if [ "$OLD_V" != "$LATEST_PREV" ]; then
+                echo "Deleting $OLD_V"
+                uv run mike delete "$OLD_V"
+              fi
+            done
+          fi
+
+          uv run mike deploy --update-aliases "$DEPLOY_VERSION" stable
+          uv run mike set-default stable
+          git push origin gh-pages
.pre-commit-config.yaml (new file, 34 lines)

@@ -0,0 +1,34 @@
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v6.0.0
+    hooks:
+      - id: check-added-large-files
+        args: ["--maxkb=750"]
+        exclude: ^uv.lock$
+      - id: end-of-file-fixer
+      - id: trailing-whitespace
+
+  - repo: local
+    hooks:
+      - id: local-ruff-check
+        name: ruff check
+        entry: uv run ruff check --force-exclude --fix --exit-non-zero-on-fix .
+        require_serial: true
+        language: unsupported
+        types: [python]
+
+      - id: local-ruff-format
+        name: ruff format
+        entry: uv run ruff format --force-exclude --exit-non-zero-on-format .
+        require_serial: true
+        language: unsupported
+        types: [python]
+
+      - id: local-ty
+        name: ty check
+        entry: uv run ty check
+        require_serial: true
+        language: unsupported
+        pass_filenames: false
@@ -48,7 +48,8 @@ uv add "fastapi-toolsets[all]"
 - **Database**: Session management, transaction helpers, table locking, and polling-based row change detection
 - **Dependencies**: FastAPI dependency factories (`PathDependency`, `BodyDependency`) for automatic DB lookups from path or body parameters
 - **Fixtures**: Fixture system with dependency management, context support, and pytest integration
-- **Model Mixins**: SQLAlchemy mixins for common column patterns (`UUIDMixin`, `UUIDv7Mixin`, `CreatedAtMixin`, `UpdatedAtMixin`, `TimestampMixin`) and lifecycle callbacks (`WatchedFieldsMixin`, `@watch`) that fire after commit for insert, update, and delete events
+- **Model Mixins**: SQLAlchemy mixins for common column patterns (`UUIDMixin`, `UUIDv7Mixin`, `CreatedAtMixin`, `UpdatedAtMixin`, `TimestampMixin`)
+- **Lifecycle Events**: Post-commit event system (`EventSession`, `listens_for`) that dispatches async/sync callbacks for insert, update, and delete operations
 - **Standardized API Responses**: Consistent response format with `Response`, `ErrorResponse`, `PaginatedResponse`, `CursorPaginatedResponse` and `OffsetPaginatedResponse`.
 - **Exception Handling**: Structured error responses with automatic OpenAPI documentation
 - **Logging**: Logging configuration with uvicorn integration via `configure_logging` and `get_logger`
@@ -1 +0,0 @@
-# Authentication
@@ -43,16 +43,16 @@ Declare `searchable_fields`, `facet_fields`, and `order_fields` once on [`CrudFa
 
 ## Routes
 
-```python title="routes.py:1:17"
---8<-- "docs_src/examples/pagination_search/routes.py:1:17"
+```python title="routes.py:1:16"
+--8<-- "docs_src/examples/pagination_search/routes.py:1:16"
 ```
 
 ### Offset pagination
 
 Best for admin panels or any UI that needs a total item count and numbered pages.
 
-```python title="routes.py:20:40"
---8<-- "docs_src/examples/pagination_search/routes.py:20:40"
+```python title="routes.py:19:37"
+--8<-- "docs_src/examples/pagination_search/routes.py:19:37"
 ```
 
 **Example request**

@@ -72,6 +72,7 @@ GET /articles/offset?page=2&items_per_page=10&search=fastapi&status=published&or
     ],
     "pagination": {
       "total_count": 42,
+      "pages": 5,
       "page": 2,
       "items_per_page": 10,
       "has_more": true

@@ -85,12 +86,14 @@ GET /articles/offset?page=2&items_per_page=10&search=fastapi&status=published&or
 
 `filter_attributes` always reflects the values visible **after** applying the active filters. Use it to populate filter dropdowns on the client.
 
+To skip the `COUNT(*)` query for better performance on large tables, pass `include_total=False`. `pagination.total_count` will be `null` in the response, while `has_more` remains accurate.
+
 ### Cursor pagination
 
 Best for feeds, infinite scroll, or any high-throughput API where offset performance degrades.
 
-```python title="routes.py:43:63"
---8<-- "docs_src/examples/pagination_search/routes.py:43:63"
+```python title="routes.py:40:58"
+--8<-- "docs_src/examples/pagination_search/routes.py:40:58"
 ```
 
 **Example request**

@@ -129,8 +132,8 @@ Pass `next_cursor` as the `cursor` query parameter on the next request to advanc
 
 [`paginate()`](../module/crud.md#unified-paginate--both-strategies-on-one-endpoint) lets a single endpoint support both strategies via a `pagination_type` query parameter. The `pagination_type` field in the response acts as a discriminator for frontend tooling.
 
-```python title="routes.py:66:90"
---8<-- "docs_src/examples/pagination_search/routes.py:66:90"
+```python title="routes.py:61:79"
+--8<-- "docs_src/examples/pagination_search/routes.py:61:79"
 ```
 
 **Offset request** (default)

@@ -144,7 +147,7 @@ GET /articles/?pagination_type=offset&page=1&items_per_page=10
   "status": "SUCCESS",
   "pagination_type": "offset",
   "data": ["..."],
-  "pagination": { "total_count": 42, "page": 1, "items_per_page": 10, "has_more": true }
+  "pagination": { "total_count": 42, "pages": 5, "page": 1, "items_per_page": 10, "has_more": true }
 }
 ```
@@ -48,7 +48,8 @@ uv add "fastapi-toolsets[all]"
 - **Database**: Session management, transaction helpers, table locking, and polling-based row change detection
 - **Dependencies**: FastAPI dependency factories (`PathDependency`, `BodyDependency`) for automatic DB lookups from path or body parameters
 - **Fixtures**: Fixture system with dependency management, context support, and pytest integration
-- **Model Mixins**: SQLAlchemy mixins for common column patterns (`UUIDMixin`, `UUIDv7Mixin`, `CreatedAtMixin`, `UpdatedAtMixin`, `TimestampMixin`) and lifecycle callbacks (`WatchedFieldsMixin`) that fire after commit for insert, update, and delete events.
+- **Model Mixins**: SQLAlchemy mixins for common column patterns (`UUIDMixin`, `UUIDv7Mixin`, `CreatedAtMixin`, `UpdatedAtMixin`, `TimestampMixin`).
+- **Lifecycle Events**: Post-commit event system (`EventSession`, `listens_for`) that dispatches async/sync callbacks for insert, update, and delete operations.
 - **Standardized API Responses**: Consistent response format with `Response`, `ErrorResponse`, `PaginatedResponse`, `CursorPaginatedResponse` and `OffsetPaginatedResponse`.
 - **Exception Handling**: Structured error responses with automatic OpenAPI documentation
 - **Logging**: Logging configuration with uvicorn integration via `configure_logging` and `get_logger`

docs/migration/v3.md (new file, 180 lines)

@@ -0,0 +1,180 @@
# Migrating to v3.0

This page covers every breaking change introduced in **v3.0** and the steps required to update your code.

---

## CRUD

### Facet keys now always use the full relationship chain

In `v2`, relationship facet fields used only the terminal column key (e.g. `"name"` for `Role.name`) and only prepended the relationship name when two facet fields shared the same column key. In `v3`, facet keys **always** include the full relationship chain joined by `__`, regardless of collisions.

=== "Before (`v2`)"

    ```
    User.status -> status
    (User.role, Role.name) -> name
    (User.role, Role.permission, Permission.name) -> name
    ```

=== "Now (`v3`)"

    ```
    User.status -> status
    (User.role, Role.name) -> role__name
    (User.role, Role.permission, Permission.name) -> role__permission__name
    ```
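
In practice this changes the keys clients send in facet filters and the keys returned in `filter_attributes`. A minimal sketch of the difference, assuming a `UserCrud` configured with `facet_fields=[User.status, (User.role, Role.name)]` as in the examples above (the call shape mirrors the `offset_paginate` examples elsewhere in these docs):

```python
# v2: the Role.name facet was keyed by the terminal column
await UserCrud.offset_paginate(
    session=session,
    filter_by={"name": ["admin"]},  # matched (User.role, Role.name)
    schema=UserRead,
)

# v3: the same filter must use the full relationship chain
await UserCrud.offset_paginate(
    session=session,
    filter_by={"role__name": ["admin"]},
    schema=UserRead,
)
```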

---

### `*_params` dependencies consolidated into per-paginate methods

The six individual dependency methods (`offset_params`, `cursor_params`, `paginate_params`, `filter_params`, `search_params`, `order_params`) have been **removed** and replaced by three consolidated methods that bundle pagination, search, filter, and order into a single `Depends()` call.

| Removed | Replacement |
|---|---|
| `offset_params()` + `filter_params()` + `search_params()` + `order_params()` | `offset_paginate_params()` |
| `cursor_params()` + `filter_params()` + `search_params()` + `order_params()` | `cursor_paginate_params()` |
| `paginate_params()` + `filter_params()` + `search_params()` + `order_params()` | `paginate_params()` |

Each new method accepts `search`, `filter`, and `order` boolean toggles (all `True` by default) to disable features you don't need.

=== "Before (`v2`)"

    ```python
    from fastapi_toolsets.crud import OrderByClause

    @router.get("/offset")
    async def list_articles_offset(
        session: SessionDep,
        params: Annotated[dict, Depends(ArticleCrud.offset_params(default_page_size=20))],
        filter_by: Annotated[dict, Depends(ArticleCrud.filter_params())],
        order_by: Annotated[OrderByClause | None, Depends(ArticleCrud.order_params(default_field=Article.created_at))],
        search: str | None = None,
    ) -> OffsetPaginatedResponse[ArticleRead]:
        return await ArticleCrud.offset_paginate(
            session=session,
            **params,
            search=search,
            filter_by=filter_by or None,
            order_by=order_by,
            schema=ArticleRead,
        )
    ```

=== "Now (`v3`)"

    ```python
    @router.get("/offset")
    async def list_articles_offset(
        session: SessionDep,
        params: Annotated[
            dict,
            Depends(
                ArticleCrud.offset_paginate_params(
                    default_page_size=20,
                    default_order_field=Article.created_at,
                )
            ),
        ],
    ) -> OffsetPaginatedResponse[ArticleRead]:
        return await ArticleCrud.offset_paginate(session=session, **params, schema=ArticleRead)
    ```

The same pattern applies to `cursor_paginate_params()` and `paginate_params()`. To disable a feature, pass the toggle:

```python
# No search or ordering, only pagination + filtering
ArticleCrud.offset_paginate_params(search=False, order=False)
```

---

## Models

The lifecycle event system has been rewritten. Callbacks are now registered with a module-level [`listens_for`](../reference/models.md#fastapi_toolsets.models.listens_for) decorator and dispatched by [`EventSession`](../reference/models.md#fastapi_toolsets.models.EventSession), replacing the mixin-based approach from `v2`.

### `WatchedFieldsMixin` and `@watch` removed

Importing `WatchedFieldsMixin` or `watch` will raise `ImportError`.

Model method callbacks (`on_create`, `on_delete`, `on_update`) and the `@watch` decorator are replaced by:

1. **`__watched_fields__`** — a plain class attribute to restrict which field changes trigger `UPDATE` events (replaces `@watch`).
2. **`@listens_for`** — a module-level decorator to register callbacks for one or more [`ModelEvent`](../reference/models.md#fastapi_toolsets.models.ModelEvent) types (replaces `on_create` / `on_delete` / `on_update` methods).

=== "Before (`v2`)"

    ```python
    from fastapi_toolsets.models import WatchedFieldsMixin, watch

    @watch("status")
    class Order(Base, UUIDMixin, WatchedFieldsMixin):
        __tablename__ = "orders"

        status: Mapped[str]

        async def on_create(self):
            await notify_new_order(self.id)

        async def on_update(self, changes):
            if "status" in changes:
                await notify_status_change(self.id, changes["status"])

        async def on_delete(self):
            await notify_order_cancelled(self.id)
    ```

=== "Now (`v3`)"

    ```python
    from fastapi_toolsets.models import ModelEvent, UUIDMixin, listens_for

    class Order(Base, UUIDMixin):
        __tablename__ = "orders"
        __watched_fields__ = ("status",)

        status: Mapped[str]

    @listens_for(Order, [ModelEvent.CREATE])
    async def on_order_created(order: Order, event_type: ModelEvent, changes: None):
        await notify_new_order(order.id)

    @listens_for(Order, [ModelEvent.UPDATE])
    async def on_order_updated(order: Order, event_type: ModelEvent, changes: dict):
        if "status" in changes:
            await notify_status_change(order.id, changes["status"])

    @listens_for(Order, [ModelEvent.DELETE])
    async def on_order_deleted(order: Order, event_type: ModelEvent, changes: None):
        await notify_order_cancelled(order.id)
    ```

### `EventSession` now required

Without `EventSession`, lifecycle callbacks will silently stop firing.

Callbacks are now dispatched inside `EventSession.commit()` rather than via background tasks. Pass it as the session class when creating your session factory:

=== "Before (`v2`)"

    ```python
    from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

    engine = create_async_engine("postgresql+asyncpg://...")
    SessionLocal = async_sessionmaker(engine, expire_on_commit=False)
    ```

=== "Now (`v3`)"

    ```python
    from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
    from fastapi_toolsets.models import EventSession

    engine = create_async_engine("postgresql+asyncpg://...")
    SessionLocal = async_sessionmaker(engine, expire_on_commit=False, class_=EventSession)
    ```

!!! note
    If you use `create_db_session` from `fastapi_toolsets.pytest`, the session already uses `EventSession` — no changes needed in tests.
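
If you expose the session to routes through a FastAPI dependency, only the factory changes. A minimal sketch, assuming the `SessionLocal` factory from the example above (the `get_session` and `SessionDep` names are illustrative, not part of the library):

```python
from collections.abc import AsyncIterator
from typing import Annotated

from fastapi import Depends
from fastapi_toolsets.models import EventSession


async def get_session() -> AsyncIterator[EventSession]:
    # SessionLocal was built with class_=EventSession, so commit() dispatches events
    async with SessionLocal() as session:
        yield session


SessionDep = Annotated[EventSession, Depends(get_session)]
```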

@@ -159,18 +159,15 @@ Three pagination methods are available. All return a typed response whose pagi
### Offset pagination

```python
+from typing import Annotated
+from fastapi import Depends
+
@router.get("")
async def get_users(
    session: SessionDep,
-    items_per_page: int = 50,
-    page: int = 1,
+    params: Annotated[dict, Depends(UserCrud.offset_paginate_params())],
) -> OffsetPaginatedResponse[UserRead]:
-    return await UserCrud.offset_paginate(
-        session=session,
-        items_per_page=items_per_page,
-        page=page,
-        schema=UserRead,
-    )
+    return await UserCrud.offset_paginate(session=session, **params, schema=UserRead)
```

The [`offset_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.offset_paginate) method returns an [`OffsetPaginatedResponse`](../reference/schemas.md#fastapi_toolsets.schemas.OffsetPaginatedResponse):

@@ -182,6 +179,7 @@ The [`offset_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.Async
  "data": ["..."],
  "pagination": {
    "total_count": 100,
+    "pages": 5,
    "page": 1,
    "items_per_page": 20,
    "has_more": true

@@ -189,21 +187,30 @@ The [`offset_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.Async
}
```

+#### Skipping the COUNT query
+
+!!! info "Added in `v2.4.1`"
+
+By default `offset_paginate` runs two queries: one for the page items and one `COUNT(*)` for `total_count`. On large tables the `COUNT` can be expensive. Pass `include_total=False` to `offset_paginate_params()` to skip it:
+
+```python
+@router.get("")
+async def get_users(
+    session: SessionDep,
+    params: Annotated[dict, Depends(UserCrud.offset_paginate_params(include_total=False))],
+) -> OffsetPaginatedResponse[UserRead]:
+    return await UserCrud.offset_paginate(session=session, **params, schema=UserRead)
+```
+
### Cursor pagination

```python
@router.get("")
async def list_users(
    session: SessionDep,
-    cursor: str | None = None,
-    items_per_page: int = 20,
+    params: Annotated[dict, Depends(UserCrud.cursor_paginate_params())],
) -> CursorPaginatedResponse[UserRead]:
-    return await UserCrud.cursor_paginate(
-        session=session,
-        cursor=cursor,
-        items_per_page=items_per_page,
-        schema=UserRead,
-    )
+    return await UserCrud.cursor_paginate(session=session, **params, schema=UserRead)
```

The [`cursor_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.cursor_paginate) method returns a [`CursorPaginatedResponse`](../reference/schemas.md#fastapi_toolsets.schemas.CursorPaginatedResponse):

@@ -238,7 +245,7 @@ The cursor column is set once on [`CrudFactory`](../reference/crud.md#fastapi_to
!!! note
    `cursor_column` is required. Calling [`cursor_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.cursor_paginate) on a CRUD class that has no `cursor_column` configured raises a `ValueError`.

-The cursor value is base64-encoded when returned to the client and decoded back to the correct Python type on the next request. The following SQLAlchemy column types are supported:
+The cursor value is URL-safe base64-encoded (no padding) when returned to the client and decoded back to the correct Python type on the next request. The following SQLAlchemy column types are supported:

| SQLAlchemy type | Python type |
|---|---|

@@ -263,25 +270,14 @@ PostCrud = CrudFactory(model=Post, cursor_column=Post.created_at)
[`paginate()`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.paginate) dispatches to `offset_paginate` or `cursor_paginate` based on a `pagination_type` query parameter, letting you expose **one endpoint** that supports both strategies. The `pagination_type` field in the response tells clients which strategy was used, enabling frontend discriminated-union typing.

```python
-from fastapi_toolsets.crud import PaginationType
from fastapi_toolsets.schemas import PaginatedResponse

@router.get("")
async def list_users(
    session: SessionDep,
-    pagination_type: PaginationType = PaginationType.OFFSET,
-    page: int = Query(1, ge=1, description="Current page (offset only)"),
-    cursor: str | None = Query(None, description="Cursor token (cursor only)"),
-    items_per_page: int = Query(20, ge=1, le=100),
+    params: Annotated[dict, Depends(UserCrud.paginate_params())],
) -> PaginatedResponse[UserRead]:
-    return await UserCrud.paginate(
-        session,
-        pagination_type=pagination_type,
-        page=page,
-        cursor=cursor,
-        items_per_page=items_per_page,
-        schema=UserRead,
-    )
+    return await UserCrud.paginate(session, **params, schema=UserRead)
```

```
@@ -289,8 +285,6 @@ GET /users?pagination_type=offset&page=2&items_per_page=10
GET /users?pagination_type=cursor&cursor=eyJ2YWx1ZSI6...&items_per_page=10
```

-Both `page` and `cursor` are always accepted by the endpoint — unused parameters are silently ignored by `paginate()`.
-
## Search

Two search strategies are available, both compatible with [`offset_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.offset_paginate) and [`cursor_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.cursor_paginate).

@@ -330,49 +324,63 @@ result = await UserCrud.offset_paginate(
)
```

+Or via the dependency to narrow which fields are exposed as query parameters:
+
+```python
+params = UserCrud.offset_paginate_params(search_fields=[Post.title])
+```
+
This allows searching with both [`offset_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.offset_paginate) and [`cursor_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.cursor_paginate):

```python
@router.get("")
async def get_users(
    session: SessionDep,
-    items_per_page: int = 50,
-    page: int = 1,
-    search: str | None = None,
+    params: Annotated[dict, Depends(UserCrud.offset_paginate_params())],
) -> OffsetPaginatedResponse[UserRead]:
-    return await UserCrud.offset_paginate(
-        session=session,
-        items_per_page=items_per_page,
-        page=page,
-        search=search,
-        schema=UserRead,
-    )
+    return await UserCrud.offset_paginate(session=session, **params, schema=UserRead)
```

```python
@router.get("")
async def get_users(
    session: SessionDep,
-    cursor: str | None = None,
-    items_per_page: int = 50,
-    search: str | None = None,
+    params: Annotated[dict, Depends(UserCrud.cursor_paginate_params())],
) -> CursorPaginatedResponse[UserRead]:
-    return await UserCrud.cursor_paginate(
-        session=session,
-        items_per_page=items_per_page,
-        cursor=cursor,
-        search=search,
-        schema=UserRead,
-    )
+    return await UserCrud.cursor_paginate(session=session, **params, schema=UserRead)
```

The dependency adds two query parameters to the endpoint:

| Parameter       | Type          |
| --------------- | ------------- |
| `search`        | `str \| null` |
| `search_column` | `str \| null` |

```
GET /posts?search=hello                      → search all configured columns
GET /posts?search=hello&search_column=title  → search only Post.title
```

+The available search column keys are returned in the `search_columns` field of [`PaginatedResponse`](../reference/schemas.md#fastapi_toolsets.schemas.PaginatedResponse). Use them to populate a column picker in the UI, or to validate `search_column` values on the client side:
+
+```json
+{
+  "status": "SUCCESS",
+  "data": ["..."],
+  "pagination": { "..." },
+  "search_columns": ["content", "author__username", "title"]
+}
+```
+
+!!! info "Key format uses `__` as a separator for relationship chains."
+    A direct column `Post.title` produces `"title"`. A relationship tuple `(Post.author, User.username)` produces `"author__username"`. An unknown `search_column` value raises [`InvalidSearchColumnError`](../reference/exceptions.md#fastapi_toolsets.exceptions.exceptions.InvalidSearchColumnError) (HTTP 422).
+
### Faceted search

!!! info "Added in `v1.2`"

-Declare `facet_fields` on the CRUD class to return distinct column values alongside paginated results. This is useful for populating filter dropdowns or building faceted search UIs.
-
-Facet fields use the same syntax as `searchable_fields` — direct columns or relationship tuples:
+Declare `facet_fields` on the CRUD class to return distinct column values alongside paginated results. This is useful for populating filter dropdowns or building faceted search UIs. Relationship traversal is supported via tuples, using the same syntax as `searchable_fields`:

```python
UserCrud = CrudFactory(

@@ -394,7 +402,47 @@ result = await UserCrud.offset_paginate(
)
```

-The distinct values are returned in the `filter_attributes` field of [`PaginatedResponse`](../reference/schemas.md#fastapi_toolsets.schemas.PaginatedResponse):
+Or via the dependency to narrow which fields are exposed as query parameters:
+
+```python
+params = UserCrud.offset_paginate_params(facet_fields=[User.country])
+```
+
+Facet filtering is built into the consolidated params dependencies. When `filter=True` (the default), each facet field is exposed as a query parameter and values are collected into `filter_by` automatically:
+
+```python
+from typing import Annotated
+
+from fastapi import Depends
+
+@router.get("", response_model_exclude_none=True)
+async def list_users(
+    session: SessionDep,
+    params: Annotated[dict, Depends(UserCrud.offset_paginate_params())],
+) -> OffsetPaginatedResponse[UserRead]:
+    return await UserCrud.offset_paginate(session=session, **params, schema=UserRead)
+```
+
+```python
+@router.get("", response_model_exclude_none=True)
+async def list_users(
+    session: SessionDep,
+    params: Annotated[dict, Depends(UserCrud.cursor_paginate_params())],
+) -> CursorPaginatedResponse[UserRead]:
+    return await UserCrud.cursor_paginate(session=session, **params, schema=UserRead)
+```
+
+Both single-value and multi-value query parameters work:
+
+```
+GET /users?status=active                      → filter_by={"status": ["active"]}
+GET /users?status=active&country=FR           → filter_by={"status": ["active"], "country": ["FR"]}
+GET /users?role__name=admin&role__name=editor → filter_by={"role__name": ["admin", "editor"]} (IN clause)
+```
+
+`filter_by` and `filters` can be combined — both are applied with AND logic.
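
A minimal sketch of combining the two, assuming `filters` accepts SQLAlchemy boolean expressions (the `datetime` cutoff and field names are illustrative):

```python
from datetime import datetime, timedelta, timezone

cutoff = datetime.now(timezone.utc) - timedelta(days=30)

result = await UserCrud.offset_paginate(
    session=session,
    filter_by={"status": ["active"]},       # facet filter chosen by the client
    filters=[User.created_at >= cutoff],    # extra server-side condition, ANDed with filter_by
    schema=UserRead,
)
```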

+The distinct values for each facet field are returned in the `filter_attributes` field of [`PaginatedResponse`](../reference/schemas.md#fastapi_toolsets.schemas.PaginatedResponse). Use them to populate filter dropdowns in the UI, or to validate `filter_by` keys on the client side:
+
```json
{

@@ -404,57 +452,19 @@ The distinct values are returned in the `filter_attributes` field of [`Paginated
  "filter_attributes": {
    "status": ["active", "inactive"],
    "country": ["DE", "FR", "US"],
-    "name": ["admin", "editor", "viewer"]
+    "role__name": ["admin", "editor", "viewer"]
  }
}
```

-Use `filter_by` to pass the client's chosen filter values directly — no need to build SQLAlchemy conditions by hand. Any unknown key raises [`InvalidFacetFilterError`](../reference/exceptions.md#fastapi_toolsets.exceptions.exceptions.InvalidFacetFilterError).
-
-!!! info "The keys in `filter_by` are the same keys the client received in `filter_attributes`."
-    Keys are normally the terminal `column.key` (e.g. `"name"` for `Role.name`). When two facet fields share the same column key (e.g. `(Build.project, Project.name)` and `(Build.os, Os.name)`), the relationship name is prepended automatically: `"project__name"` and `"os__name"`.
-
-`filter_by` and `filters` can be combined — both are applied with AND logic.
-
-Use [`filter_params()`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.filter_params) to generate a dict with the facet filter values from the query parameters:
-
-```python
-from typing import Annotated
-
-from fastapi import Depends
-
-UserCrud = CrudFactory(
-    model=User,
-    facet_fields=[User.status, User.country, (User.role, Role.name)],
-)
-
-@router.get("", response_model_exclude_none=True)
-async def list_users(
-    session: SessionDep,
-    page: int = 1,
-    filter_by: Annotated[dict[str, list[str]], Depends(UserCrud.filter_params())],
-) -> OffsetPaginatedResponse[UserRead]:
-    return await UserCrud.offset_paginate(
-        session=session,
-        page=page,
-        filter_by=filter_by,
-        schema=UserRead,
-    )
-```
-
-Both single-value and multi-value query parameters work:
-
-```
-GET /users?status=active            → filter_by={"status": ["active"]}
-GET /users?status=active&country=FR → filter_by={"status": ["active"], "country": ["FR"]}
-GET /users?role=admin&role=editor   → filter_by={"role": ["admin", "editor"]} (IN clause)
-```
+!!! info "Key format uses `__` as a separator for relationship chains."
+    A direct column `User.status` produces `"status"`. A relationship tuple `(User.role, Role.name)` produces `"role__name"`. A deeper chain `(User.role, Role.permission, Permission.name)` produces `"role__permission__name"`. An unknown `filter_by` key raises [`InvalidFacetFilterError`](../reference/exceptions.md#fastapi_toolsets.exceptions.exceptions.InvalidFacetFilterError) (HTTP 422).

## Sorting

!!! info "Added in `v1.3`"

-Declare `order_fields` on the CRUD class to expose client-driven column ordering via `order_by` and `order` query parameters.
+Declare `order_fields` on the CRUD class. Relationship traversal is supported via tuples, using the same syntax as `searchable_fields` and `facet_fields`:

```python
UserCrud = CrudFactory(

@@ -462,46 +472,80 @@ UserCrud = CrudFactory(
    order_fields=[
        User.name,
        User.created_at,
        (User.role, Role.name),  # sort by a related model column
    ],
)
```

-Call [`order_params()`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.order_params) to generate a FastAPI dependency that maps the query parameters to an [`OrderByClause`](../reference/crud.md#fastapi_toolsets.crud.factory.OrderByClause) expression:
+You can override `order_fields` per call:

```python
result = await UserCrud.offset_paginate(
    session=session,
    order_fields=[User.name],
)
```

+Or via the dependency to narrow which fields are exposed as query parameters:
+
+```python
+params = UserCrud.offset_paginate_params(order_fields=[User.name])
+```
+
+Sorting is built into the consolidated params dependencies. When `order=True` (the default), `order_by` and `order` query parameters are exposed and resolved into an `OrderByClause` automatically:
+
```python
from typing import Annotated

from fastapi import Depends
-from fastapi_toolsets.crud import OrderByClause

@router.get("")
async def list_users(
    session: SessionDep,
-    order_by: Annotated[OrderByClause | None, Depends(UserCrud.order_params())],
+    params: Annotated[dict, Depends(UserCrud.offset_paginate_params())],
) -> OffsetPaginatedResponse[UserRead]:
-    return await UserCrud.offset_paginate(session=session, order_by=order_by, schema=UserRead)
+    return await UserCrud.offset_paginate(session=session, **params, schema=UserRead)
```

+```python
+@router.get("")
+async def list_users(
+    session: SessionDep,
+    params: Annotated[dict, Depends(UserCrud.cursor_paginate_params())],
+) -> CursorPaginatedResponse[UserRead]:
+    return await UserCrud.cursor_paginate(session=session, **params, schema=UserRead)
+```
+
The dependency adds two query parameters to the endpoint:

| Parameter  | Type            |
| ---------- | --------------- |
-| `order_by` | `str | null`    |
+| `order_by` | `str \| null`   |
| `order`    | `asc` or `desc` |

```
-GET /users?order_by=name&order=asc        → ORDER BY users.name ASC
-GET /users?order_by=name&order=desc       → ORDER BY users.name DESC
+GET /users?order_by=name&order=asc        → ORDER BY users.name ASC
+GET /users?order_by=role__name&order=desc → LEFT JOIN roles ON ... ORDER BY roles.name DESC
```

-An unknown `order_by` value raises [`InvalidOrderFieldError`](../reference/exceptions.md#fastapi_toolsets.exceptions.exceptions.InvalidOrderFieldError) (HTTP 422).
+!!! info "Relationship tuples are joined automatically."
+    When a relation field is selected, the related table is LEFT OUTER JOINed automatically. An unknown `order_by` value raises [`InvalidOrderFieldError`](../reference/exceptions.md#fastapi_toolsets.exceptions.exceptions.InvalidOrderFieldError) (HTTP 422).

-You can also pass `order_fields` directly to `order_params()` to override the class-level defaults without modifying them:
-
-```python
-UserOrderParams = UserCrud.order_params(order_fields=[User.name])
+The available sort keys are returned in the `order_columns` field of [`PaginatedResponse`](../reference/schemas.md#fastapi_toolsets.schemas.PaginatedResponse). Use them to populate a sort picker in the UI, or to validate `order_by` values on the client side:
+
+```json
+{
+  "status": "SUCCESS",
+  "data": ["..."],
+  "pagination": { "..." },
+  "order_columns": ["created_at", "name", "role__name"]
+}
+```
+
+!!! info "Key format uses `__` as a separator for relationship chains."
+    A direct column `User.name` produces `"name"`. A relationship tuple `(User.role, Role.name)` produces `"role__name"`.

## Relationship loading

!!! info "Added in `v1.1`"

@@ -586,12 +630,11 @@ async def get_user(session: SessionDep, uuid: UUID) -> Response[UserRead]:
    )

@router.get("")
-async def list_users(session: SessionDep, page: int = 1) -> OffsetPaginatedResponse[UserRead]:
-    return await crud.UserCrud.offset_paginate(
-        session=session,
-        page=page,
-        schema=UserRead,
-    )
+async def list_users(
+    session: SessionDep,
+    params: Annotated[dict, Depends(crud.UserCrud.offset_paginate_params())],
+) -> OffsetPaginatedResponse[UserRead]:
+    return await crud.UserCrud.offset_paginate(session=session, **params, schema=UserRead)
```

The schema must have `from_attributes=True` (or inherit from [`PydanticBase`](../reference/schemas.md#fastapi_toolsets.schemas.PydanticBase)) so it can be built from SQLAlchemy model instances.
@@ -38,18 +38,20 @@ By context with [`load_fixtures_by_context`](../reference/fixtures.md#fastapi_to
from fastapi_toolsets.fixtures import load_fixtures_by_context

async with db_context() as session:
-    await load_fixtures_by_context(session=session, registry=fixtures, context=Context.TESTING)
+    await load_fixtures_by_context(session, fixtures, Context.TESTING)
```

-Directly with [`load_fixtures`](../reference/fixtures.md#fastapi_toolsets.fixtures.utils.load_fixtures):
+Directly by name with [`load_fixtures`](../reference/fixtures.md#fastapi_toolsets.fixtures.utils.load_fixtures):

```python
from fastapi_toolsets.fixtures import load_fixtures

async with db_context() as session:
-    await load_fixtures(session=session, registry=fixtures)
+    await load_fixtures(session, fixtures, "roles", "test_users")
```

+Both functions return a `dict[str, list[...]]` mapping each fixture name to the list of loaded instances.
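
A minimal usage sketch of that return value, reusing the `"roles"` and `"test_users"` fixture names from the example above (the assumption that those fixtures return `Role` and `User` instances is illustrative):

```python
async with db_context() as session:
    loaded = await load_fixtures(session, fixtures, "roles", "test_users")

    roles = loaded["roles"]        # instances created by the "roles" fixture
    users = loaded["test_users"]   # instances created by the "test_users" fixture
    print(f"loaded {len(roles)} roles and {len(users)} users")
```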

## Contexts

[`Context`](../reference/fixtures.md#fastapi_toolsets.fixtures.enum.Context) is an enum with predefined values:

@@ -58,10 +60,60 @@ async with db_context() as session:
|---------|-------------|
| `Context.BASE` | Core data required in all environments |
| `Context.TESTING` | Data only loaded during tests |
| `Context.DEVELOPMENT` | Data only loaded in development |
| `Context.PRODUCTION` | Data only loaded in production |

A fixture with no `contexts` defined takes `Context.BASE` by default.

+### Custom contexts
+
+Plain strings and any `Enum` subclass are accepted wherever a `Context` enum is expected.
+
+```python
+from enum import Enum
+
+class AppContext(str, Enum):
+    STAGING = "staging"
+    DEMO = "demo"
+
+@fixtures.register(contexts=[AppContext.STAGING])
+def staging_data():
+    return [Config(key="feature_x", enabled=True)]
+
+await load_fixtures_by_context(session, fixtures, AppContext.STAGING)
+```
+
+### Default context for a registry
+
+Pass `contexts` to `FixtureRegistry` to set a default for all fixtures registered in it:
+
+```python
+testing_registry = FixtureRegistry(contexts=[Context.TESTING])
+
+@testing_registry.register  # implicitly contexts=[Context.TESTING]
+def test_orders():
+    return [Order(id=1, total=99)]
+```
+
+### Same fixture name, multiple context variants
+
+The same fixture name may be registered under different (non-overlapping) context sets. When multiple contexts are loaded together, all matching variants are merged:
+
+```python
+@fixtures.register(contexts=[Context.BASE])
+def users():
+    return [User(id=1, username="admin")]
+
+@fixtures.register(contexts=[Context.TESTING])
+def users():
+    return [User(id=2, username="tester")]
+
+# loads both admin and tester
+await load_fixtures_by_context(session, fixtures, Context.BASE, Context.TESTING)
+```
+
+Registering two variants with overlapping context sets raises `ValueError`.

## Load strategies

[`LoadStrategy`](../reference/fixtures.md#fastapi_toolsets.fixtures.enum.LoadStrategy) controls how the fixture loader handles rows that already exist:

@@ -69,20 +121,44 @@ A fixture with no `contexts` defined takes `Context.BASE` by default.
| Strategy | Description |
|----------|-------------|
| `LoadStrategy.INSERT` | Insert only, fail on duplicates |
-| `LoadStrategy.UPSERT` | Insert or update on conflict |
-| `LoadStrategy.SKIP` | Skip rows that already exist |
+| `LoadStrategy.MERGE` | Insert or update on conflict (default) |
+| `LoadStrategy.SKIP_EXISTING` | Skip rows that already exist |

+```python
+await load_fixtures_by_context(
+    session, fixtures, Context.BASE, strategy=LoadStrategy.SKIP_EXISTING
+)
+```
+
## Merging registries

-Split fixtures definitions across modules and merge them:
+Split fixture definitions across modules and merge them:

```python
from myapp.fixtures.dev import dev_fixtures
from myapp.fixtures.prod import prod_fixtures

-fixtures = fixturesRegistry()
+fixtures = FixtureRegistry()
fixtures.include_registry(registry=dev_fixtures)
fixtures.include_registry(registry=prod_fixtures)
```

+Fixtures with the same name are allowed as long as their context sets do not overlap. Conflicting contexts raise `ValueError`.
+
+## Looking up fixture instances
+
+[`get_obj_by_attr`](../reference/fixtures.md#fastapi_toolsets.fixtures.utils.get_obj_by_attr) retrieves a specific instance from a fixture function by attribute value — useful when building cross-fixture `depends_on` relationships:
+
+```python
+from fastapi_toolsets.fixtures import get_obj_by_attr
+
+@fixtures.register(depends_on=["roles"])
+def users():
+    admin_role = get_obj_by_attr(roles, "name", "admin")
+    return [User(id=1, username="alice", role_id=admin_role.id)]
+```
+
+Raises `StopIteration` if no matching instance is found.

## Pytest integration

@@ -111,7 +187,6 @@ async def test_user_can_login(fixture_users: list[User], fixture_roles: list[Rol
    ...
```

-
The load order is resolved automatically from the `depends_on` declarations in your registry. Each generated fixture receives `db_session` as a dependency and returns the list of loaded model instances.

## CLI integration
@@ -117,103 +117,118 @@ class Article(Base, UUIDMixin, TimestampMixin):
    title: Mapped[str]
```

-### [`WatchedFieldsMixin`](../reference/models.md#fastapi_toolsets.models.WatchedFieldsMixin)
+## Lifecycle events

-!!! info "Added in `v2.4`"
+The event system provides lifecycle callbacks that fire **after commit**. If the transaction rolls back, no callback fires.

-`WatchedFieldsMixin` provides lifecycle callbacks that fire **after commit** — meaning the row is durably persisted when your callback runs. If the transaction rolls back, no callback fires.
+### Setup

-Three callbacks are available, each corresponding to a [`ModelEvent`](../reference/models.md#fastapi_toolsets.models.ModelEvent) value:
+Event dispatch requires [`EventSession`](../reference/models.md#fastapi_toolsets.models.EventSession). Pass it as the session class when creating your session factory:

-| Callback | Event | Trigger |
-|---|---|---|
-| `on_create()` | `ModelEvent.CREATE` | After `INSERT` |
-| `on_delete()` | `ModelEvent.DELETE` | After `DELETE` |
-| `on_update(changes)` | `ModelEvent.UPDATE` | After `UPDATE` on a watched field |
+```python
+from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
+from fastapi_toolsets.models import EventSession

-Server-side defaults (e.g. `id`, `created_at`) are fully populated in all callbacks. All callbacks support both `async def` and plain `def`. Use `@watch` to restrict which fields trigger `on_update`:
+engine = create_async_engine("postgresql+asyncpg://...")
+SessionLocal = async_sessionmaker(engine, expire_on_commit=False, class_=EventSession)
+```

-| Decorator | `on_update` behaviour |
+!!! info "Callbacks fire on `session.commit()` only — not on savepoints."
+    Savepoints created by [`get_transaction`](db.md) or `begin_nested()` do **not**
+    trigger callbacks. All events accumulated across flushes are dispatched once
+    when the outermost `commit()` is called.

+### Events
+
+Three event types are available, each corresponding to a [`ModelEvent`](../reference/models.md#fastapi_toolsets.models.ModelEvent) value:
+
+| Event | Trigger |
+|---|---|
-| `@watch("status", "role")` | Only fires when `status` or `role` changes |
-| *(no decorator)* | Fires when **any** mapped field changes |
+| `ModelEvent.CREATE` | After `INSERT` commit |
+| `ModelEvent.DELETE` | After `DELETE` commit |
+| `ModelEvent.UPDATE` | After `UPDATE` commit on a watched field |

-#### Option 1 — catch-all with `on_event`
+!!! warning "Callbacks fire only for ORM-level changes. Rows updated via raw SQL (`UPDATE ... SET ...`) are not detected."

-Override `on_event` to handle all event types in one place. The specific methods delegate here by default:
+### Watched fields

+Set `__watched_fields__` on the model to restrict which field changes trigger `UPDATE` events. It must be a `tuple[str, ...]` — any other type raises `TypeError`:
+
+| Class attribute | `UPDATE` behaviour |
+|---|---|
+| `__watched_fields__ = ("status", "role")` | Only fires when `status` or `role` changes |
+| *(not set)* | Fires when **any** mapped field changes |
+
+`__watched_fields__` is inherited through the class hierarchy via normal Python MRO. A subclass can override it:
+
```python
-from fastapi_toolsets.models import ModelEvent, UUIDMixin, WatchedFieldsMixin, watch
+class Order(Base, UUIDMixin):
+    __watched_fields__ = ("status",)
+    ...

-@watch("status")
-class Order(Base, UUIDMixin, WatchedFieldsMixin):
+class UrgentOrder(Order):
+    # inherits __watched_fields__ = ("status",)
+    ...
+
+class PriorityOrder(Order):
+    __watched_fields__ = ("priority",)
+    # overrides parent — UPDATE fires only for priority changes
+    ...
```

+### Registering handlers
+
+Register handlers with the [`listens_for`](../reference/models.md#fastapi_toolsets.models.listens_for) decorator. Every callback receives three arguments: the model instance, the [`ModelEvent`](../reference/models.md#fastapi_toolsets.models.ModelEvent) that triggered it, and a `changes` dict (`None` for `CREATE` and `DELETE`):
+
```python
+from fastapi_toolsets.models import ModelEvent, UUIDMixin, listens_for
+
+class Order(Base, UUIDMixin):
    __tablename__ = "orders"
+    __watched_fields__ = ("status",)

    status: Mapped[str]

-    async def on_event(self, event: ModelEvent, changes: dict | None = None) -> None:
-        if event == ModelEvent.CREATE:
-            await notify_new_order(self.id)
-        elif event == ModelEvent.DELETE:
-            await notify_order_cancelled(self.id)
-        elif event == ModelEvent.UPDATE:
-            await notify_status_change(self.id, changes["status"])
+@listens_for(Order, [ModelEvent.CREATE])
+async def on_order_created(order: Order, event_type: ModelEvent, changes: None):
+    await notify_new_order(order.id)
+
+@listens_for(Order, [ModelEvent.DELETE])
+async def on_order_deleted(order: Order, event_type: ModelEvent, changes: None):
+    await notify_order_cancelled(order.id)
+
+@listens_for(Order, [ModelEvent.UPDATE])
+async def on_order_updated(order: Order, event_type: ModelEvent, changes: dict):
+    if "status" in changes:
+        await notify_status_change(order.id, changes["status"])
```

-#### Option 2 — targeted overrides
+Multiple handlers can be registered for the same model and event. Handlers registered on a parent class also fire for subclass instances.

-Override individual methods for more focused logic:
+A single handler can listen for multiple events at once. When `event_types` is omitted, the handler fires for all events:

```python
-@watch("status")
-class Order(Base, UUIDMixin, WatchedFieldsMixin):
-    __tablename__ = "orders"
+@listens_for(Order, [ModelEvent.CREATE, ModelEvent.UPDATE])
+async def on_order_changed(order: Order, event_type: ModelEvent, changes: dict | None):
+    await invalidate_cache(order.id)

-    status: Mapped[str]
-
-    async def on_create(self) -> None:
-        await notify_new_order(self.id)
-
-    async def on_delete(self) -> None:
-        await notify_order_cancelled(self.id)
-
-    async def on_update(self, changes: dict) -> None:
-        if "status" in changes:
-            old = changes["status"]["old"]
-            new = changes["status"]["new"]
-            await notify_status_change(self.id, old, new)
+@listens_for(Order)  # all events
+async def on_any_order_event(order: Order, event_type: ModelEvent, changes: dict | None):
+    await audit_log(order.id, event_type)
```

-#### Field changes format
+### Field changes format

-The `changes` dict maps each watched field that changed to `{"old": ..., "new": ...}`. Only fields that actually changed are included:
+The `changes` dict maps each watched field that changed to `{"old": ..., "new": ...}`. Only fields that actually changed are included. For `CREATE` and `DELETE` events, `changes` is `None`:

```python
+# CREATE / DELETE → changes is None
# status changed → {"status": {"old": "pending", "new": "shipped"}}
# two fields changed → {"status": {...}, "assigned_to": {...}}
```
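
A handler can read the old and new values directly from that structure. A minimal sketch, assuming the `Order` model defined above (the `logger` is assumed to come from `configure_logging` / `get_logger`, and the handler name is illustrative):

```python
@listens_for(Order, [ModelEvent.UPDATE])
async def log_status_transition(order: Order, event_type: ModelEvent, changes: dict):
    if "status" in changes:
        old_value = changes["status"]["old"]  # value before the committed update
        new_value = changes["status"]["new"]  # value after the committed update
        logger.info("order %s moved from %s to %s", order.id, old_value, new_value)
```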

-!!! info "Multiple flushes in one transaction are merged: the earliest `old` and latest `new` are preserved, and `on_update` fires only once per commit."
-
-!!! warning "Callbacks fire only for ORM-level changes. Rows updated via raw SQL (`UPDATE ... SET ...`) are not detected."

## Composing mixins

All mixins can be combined in any order. The only constraint is that exactly one primary key must be defined — either via `UUIDMixin` or directly on the model.

```python
from fastapi_toolsets.models import UUIDMixin, TimestampMixin

class Event(Base, UUIDMixin, TimestampMixin):
    __tablename__ = "events"
    name: Mapped[str]

class Counter(Base, UpdatedAtMixin):
    __tablename__ = "counters"
    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
    value: Mapped[int]
```

---

[:material-api: API Reference](../reference/models.md)
@@ -79,9 +79,6 @@ The examples above are already compatible with parallel test execution with `pyt

## Cleaning up tables

-!!! warning
-    Since `v2.1.0`, `cleanup_tables` lives in `fastapi_toolsets.db`. For backward compatibility the function is still available in `fastapi_toolsets.pytest`, but this will be removed in `v3.0.0`.
-
If you want to manually clean up a database, use [`cleanup_tables`](../reference/db.md#fastapi_toolsets.db.cleanup_tables); it truncates all tables between tests for fast isolation:

```python
@@ -1,267 +0,0 @@
# Security

Composable authentication helpers for FastAPI that use `Security()` for OpenAPI documentation and accept user-provided validator functions with full type flexibility.

## Overview

The `security` module provides four auth source classes and a `MultiAuth` factory. Each class wraps a FastAPI security scheme for OpenAPI and accepts a validator function called as:

```python
await validator(credential, **kwargs)
```

where `kwargs` are the extra keyword arguments provided at instantiation (roles, permissions, enums, etc.). The validator returns the authenticated identity (e.g. a `User` model) which becomes the route dependency value.

```python
from fastapi import Security
from fastapi_toolsets.security import BearerTokenAuth

async def verify_token(token: str, *, role: str) -> User:
    user = await db.get_by_token(token)
    if not user or user.role != role:
        raise UnauthorizedError()
    return user

bearer_admin = BearerTokenAuth(verify_token, role="admin")

@app.get("/admin")
async def admin_route(user: User = Security(bearer_admin)):
    return user
```

## Auth sources

### [`BearerTokenAuth`](../reference/security.md#fastapi_toolsets.security.BearerTokenAuth)

Reads the `Authorization: Bearer <token>` header. Wraps `HTTPBearer` for OpenAPI.

```python
from fastapi_toolsets.security import BearerTokenAuth

bearer = BearerTokenAuth(validator=verify_token)

@app.get("/me")
async def me(user: User = Security(bearer)):
    return user
```

#### Token prefix

The optional `prefix` parameter restricts a `BearerTokenAuth` instance to tokens that start with a given string. The prefix is **kept** in the value passed to the validator — store and compare tokens with their prefix included.

This lets you deploy multiple `BearerTokenAuth` instances in the same application and disambiguate them efficiently in `MultiAuth`:

```python
user_bearer = BearerTokenAuth(verify_user, prefix="user_")  # matches "Bearer user_..."
org_bearer = BearerTokenAuth(verify_org, prefix="org_")     # matches "Bearer org_..."
```

Use [`generate_token()`](#token-generation) to create correctly-prefixed tokens.

#### Token generation

`BearerTokenAuth.generate_token()` produces a secure random token ready to store in your database and return to the client. If a prefix is configured it is prepended automatically:

```python
bearer = BearerTokenAuth(verify_token, prefix="user_")

token = bearer.generate_token()  # e.g. "user_Xk3mN..."
await db.store_token(user_id, token)
return {"access_token": token, "token_type": "bearer"}
```

The client sends `Authorization: Bearer user_Xk3mN...` and the validator receives the full token (prefix included) to compare against the stored value.

### [`CookieAuth`](../reference/security.md#fastapi_toolsets.security.CookieAuth)

Reads a named cookie. Wraps `APIKeyCookie` for OpenAPI.

```python
from fastapi_toolsets.security import CookieAuth

cookie_auth = CookieAuth("session", validator=verify_session)

@app.get("/me")
async def me(user: User = Security(cookie_auth)):
    return user
```

### [`OAuth2Auth`](../reference/security.md#fastapi_toolsets.security.OAuth2Auth)

Reads the `Authorization: Bearer <token>` header and registers the token endpoint in OpenAPI via `OAuth2PasswordBearer`.

```python
from fastapi_toolsets.security import OAuth2Auth

oauth2_auth = OAuth2Auth(token_url="/token", validator=verify_token)

@app.get("/me")
async def me(user: User = Security(oauth2_auth)):
    return user
```

### [`OpenIDAuth`](../reference/security.md#fastapi_toolsets.security.OpenIDAuth)

Reads the `Authorization: Bearer <token>` header and registers the OpenID Connect discovery URL in OpenAPI via `OpenIdConnect`. Token validation is fully delegated to your validator — use any OIDC / JWT library (`authlib`, `python-jose`, `PyJWT`).

```python
from fastapi_toolsets.security import OpenIDAuth

async def verify_google_token(token: str, *, audience: str) -> User:
    payload = jwt.decode(token, google_public_keys, algorithms=["RS256"],
                         audience=audience)
    return User(email=payload["email"], name=payload["name"])

google_auth = OpenIDAuth(
    "https://accounts.google.com/.well-known/openid-configuration",
    verify_google_token,
    audience="my-client-id",
)

@app.get("/me")
async def me(user: User = Security(google_auth)):
    return user
```

The discovery URL is used **only for OpenAPI documentation** — no requests are made to it by this class. You are responsible for fetching and caching the provider's public keys in your validator.

Multiple providers work naturally with `MultiAuth`:

```python
multi = MultiAuth(google_auth, github_auth)

@app.get("/data")
async def data(user: User = Security(multi)):
    return user
```

## Typed validator kwargs

All auth classes forward extra instantiation keyword arguments to the validator. Arguments can be any type — enums, strings, integers, etc. The validator returns the authenticated identity, which FastAPI injects directly into the route handler.

```python
async def verify_token(token: str, *, role: Role, permission: str) -> User:
    user = await decode_token(token)
    if user.role != role or permission not in user.permissions:
        raise UnauthorizedError()
    return user

bearer = BearerTokenAuth(verify_token, role=Role.ADMIN, permission="billing:read")
```

Each auth instance is self-contained — create a separate instance per distinct requirement instead of passing requirements through `Security(scopes=[...])`.

### Using `.require()` inline

If declaring a new top-level variable per role feels verbose, use `.require()` to create a configured clone directly in the route decorator. The original instance
|
||||
is not mutated:
|
||||
|
||||
```python
|
||||
bearer = BearerTokenAuth(verify_token)
|
||||
|
||||
@app.get("/admin/stats")
|
||||
async def admin_stats(user: User = Security(bearer.require(role=Role.ADMIN))):
|
||||
return {"message": f"Hello admin {user.name}"}
|
||||
|
||||
@app.get("/profile")
|
||||
async def profile(user: User = Security(bearer.require(role=Role.USER))):
|
||||
return {"id": user.id, "name": user.name}
|
||||
```
|
||||
|
||||
`.require()` kwargs are merged over existing ones — new values win on conflict.
|
||||
The `prefix` (for `BearerTokenAuth`) and cookie name (for `CookieAuth`) are
|
||||
always preserved.
|
||||
|
||||
`.require()` instances work transparently inside `MultiAuth`:
|
||||
|
||||
```python
|
||||
multi = MultiAuth(
|
||||
user_bearer.require(role=Role.USER),
|
||||
org_bearer.require(role=Role.ADMIN),
|
||||
)
|
||||
```
|
||||
|
||||
## MultiAuth
|
||||
|
||||
[`MultiAuth`](../reference/security.md#fastapi_toolsets.security.MultiAuth) combines
|
||||
multiple auth sources into a single callable. Sources are tried in order; the
|
||||
first one that finds a credential wins.
|
||||
|
||||
```python
|
||||
from fastapi_toolsets.security import MultiAuth
|
||||
|
||||
multi = MultiAuth(user_bearer, org_bearer, cookie_auth)
|
||||
|
||||
@app.get("/data")
|
||||
async def data_route(user = Security(multi)):
|
||||
return user
|
||||
```
|
||||
|
||||
### Using `.require()` on MultiAuth
|
||||
|
||||
`MultiAuth` also supports `.require()`, which propagates the kwargs to every
|
||||
source that implements it. Sources that do not (e.g. custom `AuthSource`
|
||||
subclasses) are passed through unchanged:
|
||||
|
||||
```python
|
||||
multi = MultiAuth(bearer, cookie)
|
||||
|
||||
@app.get("/admin")
|
||||
async def admin(user: User = Security(multi.require(role=Role.ADMIN))):
|
||||
return user
|
||||
```
|
||||
|
||||
This is equivalent to calling `.require()` on each source individually:
|
||||
|
||||
```python
|
||||
# These two are identical
|
||||
multi.require(role=Role.ADMIN)
|
||||
|
||||
MultiAuth(
|
||||
bearer.require(role=Role.ADMIN),
|
||||
cookie.require(role=Role.ADMIN),
|
||||
)
|
||||
```
|
||||
|
||||
### Prefix-based dispatch
|
||||
|
||||
Because `extract()` is pure string matching (no I/O), prefix-based source
|
||||
selection is essentially free. Only the matching source's validator (which may
|
||||
involve DB or network I/O) is ever called:
|
||||
|
||||
```python
|
||||
user_bearer = BearerTokenAuth(verify_user, prefix="user_")
|
||||
org_bearer = BearerTokenAuth(verify_org, prefix="org_")
|
||||
|
||||
multi = MultiAuth(user_bearer, org_bearer)
|
||||
|
||||
# "Bearer user_alice" → only verify_user runs, receives "user_alice"
|
||||
# "Bearer org_acme" → only verify_org runs, receives "org_acme"
|
||||
```
|
||||
|
||||
Tokens are stored and compared **with their prefix** — use `generate_token()` on
|
||||
each source to issue correctly-prefixed tokens:
|
||||
|
||||
```python
|
||||
user_token = user_bearer.generate_token() # "user_..."
|
||||
org_token = org_bearer.generate_token() # "org_..."
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
[:material-api: API Reference](../reference/security.md)
|
||||
@@ -6,17 +6,19 @@ You can import them directly from `fastapi_toolsets.models`:
|
||||
|
||||
```python
|
||||
from fastapi_toolsets.models import (
|
||||
EventSession,
|
||||
ModelEvent,
|
||||
UUIDMixin,
|
||||
UUIDv7Mixin,
|
||||
CreatedAtMixin,
|
||||
UpdatedAtMixin,
|
||||
TimestampMixin,
|
||||
WatchedFieldsMixin,
|
||||
watch,
|
||||
listens_for,
|
||||
)
|
||||
```
|
||||
|
||||
## ::: fastapi_toolsets.models.EventSession
|
||||
|
||||
## ::: fastapi_toolsets.models.ModelEvent
|
||||
|
||||
## ::: fastapi_toolsets.models.UUIDMixin
|
||||
@@ -29,6 +31,4 @@ from fastapi_toolsets.models import (
|
||||
|
||||
## ::: fastapi_toolsets.models.TimestampMixin
|
||||
|
||||
## ::: fastapi_toolsets.models.WatchedFieldsMixin
|
||||
|
||||
## ::: fastapi_toolsets.models.watch
|
||||
## ::: fastapi_toolsets.models.listens_for
|
||||
|
||||
@@ -1,28 +0,0 @@
|
||||
# `security`
|
||||
|
||||
Here's the reference for the authentication helpers provided by the `security` module.
|
||||
|
||||
You can import them directly from `fastapi_toolsets.security`:
|
||||
|
||||
```python
|
||||
from fastapi_toolsets.security import (
|
||||
AuthSource,
|
||||
BearerTokenAuth,
|
||||
CookieAuth,
|
||||
OAuth2Auth,
|
||||
OpenIDAuth,
|
||||
MultiAuth,
|
||||
)
|
||||
```
|
||||
|
||||
## ::: fastapi_toolsets.security.AuthSource
|
||||
|
||||
## ::: fastapi_toolsets.security.BearerTokenAuth
|
||||
|
||||
## ::: fastapi_toolsets.security.CookieAuth
|
||||
|
||||
## ::: fastapi_toolsets.security.OAuth2Auth
|
||||
|
||||
## ::: fastapi_toolsets.security.OpenIDAuth
|
||||
|
||||
## ::: fastapi_toolsets.security.MultiAuth
|
||||
@@ -1,9 +0,0 @@
|
||||
from fastapi import FastAPI
|
||||
|
||||
from fastapi_toolsets.exceptions import init_exceptions_handlers
|
||||
|
||||
from .routes import router
|
||||
|
||||
app = FastAPI()
|
||||
init_exceptions_handlers(app=app)
|
||||
app.include_router(router=router)
|
||||
@@ -1,9 +0,0 @@
|
||||
from fastapi_toolsets.crud import CrudFactory
|
||||
|
||||
from .models import OAuthAccount, OAuthProvider, Team, User, UserToken
|
||||
|
||||
TeamCrud = CrudFactory(model=Team)
|
||||
UserCrud = CrudFactory(model=User)
|
||||
UserTokenCrud = CrudFactory(model=UserToken)
|
||||
OAuthProviderCrud = CrudFactory(model=OAuthProvider)
|
||||
OAuthAccountCrud = CrudFactory(model=OAuthAccount)
|
||||
@@ -1,15 +0,0 @@
|
||||
from fastapi import Depends
|
||||
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
|
||||
|
||||
from fastapi_toolsets.db import create_db_context, create_db_dependency
|
||||
|
||||
DATABASE_URL = "postgresql+asyncpg://postgres:postgres@localhost:5432/postgres"
|
||||
|
||||
engine = create_async_engine(url=DATABASE_URL, future=True)
|
||||
async_session_maker = async_sessionmaker(bind=engine, expire_on_commit=False)
|
||||
|
||||
get_db = create_db_dependency(session_maker=async_session_maker)
|
||||
get_db_context = create_db_context(session_maker=async_session_maker)
|
||||
|
||||
|
||||
SessionDep = Depends(get_db)
|
||||
@@ -1,105 +0,0 @@
|
||||
import enum
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
|
||||
from sqlalchemy import (
|
||||
Boolean,
|
||||
DateTime,
|
||||
Enum,
|
||||
ForeignKey,
|
||||
Integer,
|
||||
String,
|
||||
UniqueConstraint,
|
||||
)
|
||||
from sqlalchemy.dialects.postgresql import UUID as PG_UUID
|
||||
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
|
||||
|
||||
from fastapi_toolsets.models import TimestampMixin, UUIDMixin
|
||||
|
||||
|
||||
class Base(DeclarativeBase, UUIDMixin):
|
||||
type_annotation_map = {
|
||||
str: String(),
|
||||
int: Integer(),
|
||||
UUID: PG_UUID(as_uuid=True),
|
||||
datetime: DateTime(timezone=True),
|
||||
}
|
||||
|
||||
|
||||
class UserRole(enum.Enum):
|
||||
admin = "admin"
|
||||
moderator = "moderator"
|
||||
user = "user"
|
||||
|
||||
|
||||
class Team(Base, TimestampMixin):
|
||||
__tablename__ = "teams"
|
||||
|
||||
name: Mapped[str] = mapped_column(String, unique=True, index=True)
|
||||
users: Mapped[list["User"]] = relationship(back_populates="team")
|
||||
|
||||
|
||||
class User(Base, TimestampMixin):
|
||||
__tablename__ = "users"
|
||||
|
||||
username: Mapped[str] = mapped_column(String, unique=True, index=True)
|
||||
email: Mapped[str | None] = mapped_column(
|
||||
String, unique=True, index=True, nullable=True
|
||||
)
|
||||
hashed_password: Mapped[str | None] = mapped_column(String, nullable=True)
|
||||
is_active: Mapped[bool] = mapped_column(Boolean, default=True)
|
||||
role: Mapped[UserRole] = mapped_column(Enum(UserRole), default=UserRole.user)
|
||||
|
||||
team_id: Mapped[UUID | None] = mapped_column(ForeignKey("teams.id"), nullable=True)
|
||||
team: Mapped["Team | None"] = relationship(back_populates="users")
|
||||
oauth_accounts: Mapped[list["OAuthAccount"]] = relationship(back_populates="user")
|
||||
tokens: Mapped[list["UserToken"]] = relationship(back_populates="user")
|
||||
|
||||
|
||||
class UserToken(Base, TimestampMixin):
|
||||
"""API tokens for a user (multiple allowed)."""
|
||||
|
||||
__tablename__ = "user_tokens"
|
||||
|
||||
user_id: Mapped[UUID] = mapped_column(ForeignKey("users.id"))
|
||||
# Store hashed token value
|
||||
token_hash: Mapped[str] = mapped_column(String, unique=True, index=True)
|
||||
name: Mapped[str | None] = mapped_column(String, nullable=True)
|
||||
expires_at: Mapped[datetime | None] = mapped_column(
|
||||
DateTime(timezone=True), nullable=True
|
||||
)
|
||||
|
||||
user: Mapped["User"] = relationship(back_populates="tokens")
|
||||
|
||||
|
||||
class OAuthProvider(Base, TimestampMixin):
|
||||
"""Configurable OAuth2 / OpenID Connect provider."""
|
||||
|
||||
__tablename__ = "oauth_providers"
|
||||
|
||||
slug: Mapped[str] = mapped_column(String, unique=True, index=True)
|
||||
name: Mapped[str] = mapped_column(String)
|
||||
client_id: Mapped[str] = mapped_column(String)
|
||||
client_secret: Mapped[str] = mapped_column(String)
|
||||
discovery_url: Mapped[str] = mapped_column(String, nullable=False)
|
||||
scopes: Mapped[str] = mapped_column(String, default="openid email profile")
|
||||
is_active: Mapped[bool] = mapped_column(Boolean, default=True)
|
||||
|
||||
accounts: Mapped[list["OAuthAccount"]] = relationship(back_populates="provider")
|
||||
|
||||
|
||||
class OAuthAccount(Base, TimestampMixin):
|
||||
"""OAuth2 / OpenID Connect account linked to a user."""
|
||||
|
||||
__tablename__ = "oauth_accounts"
|
||||
__table_args__ = (
|
||||
UniqueConstraint("provider_id", "subject", name="uq_oauth_provider_subject"),
|
||||
)
|
||||
|
||||
user_id: Mapped[UUID] = mapped_column(ForeignKey("users.id"))
|
||||
provider_id: Mapped[UUID] = mapped_column(ForeignKey("oauth_providers.id"))
|
||||
# OAuth `sub` / OpenID subject identifier
|
||||
subject: Mapped[str] = mapped_column(String)
|
||||
|
||||
user: Mapped["User"] = relationship(back_populates="oauth_accounts")
|
||||
provider: Mapped["OAuthProvider"] = relationship(back_populates="accounts")
|
||||
@@ -1,122 +0,0 @@
|
||||
from typing import Annotated
|
||||
from uuid import UUID
|
||||
|
||||
import bcrypt
|
||||
from fastapi import APIRouter, Form, HTTPException, Response, Security
|
||||
|
||||
from fastapi_toolsets.dependencies import PathDependency
|
||||
|
||||
from .crud import UserCrud, UserTokenCrud
|
||||
from .db import SessionDep
|
||||
from .models import OAuthProvider, User, UserToken
|
||||
from .schemas import (
|
||||
ApiTokenCreateRequest,
|
||||
ApiTokenResponse,
|
||||
RegisterRequest,
|
||||
UserCreate,
|
||||
UserResponse,
|
||||
)
|
||||
from .security import auth, cookie_auth, create_api_token
|
||||
|
||||
ProviderDep = PathDependency(
|
||||
model=OAuthProvider,
|
||||
field=OAuthProvider.slug,
|
||||
session_dep=SessionDep,
|
||||
param_name="slug",
|
||||
)
|
||||
|
||||
|
||||
def hash_password(password: str) -> str:
|
||||
return bcrypt.hashpw(password.encode(), bcrypt.gensalt()).decode()
|
||||
|
||||
|
||||
def verify_password(plain: str, hashed: str) -> bool:
|
||||
return bcrypt.checkpw(plain.encode(), hashed.encode())
|
||||
|
||||
|
||||
router = APIRouter(prefix="/auth")
|
||||
|
||||
|
||||
@router.post("/register", response_model=UserResponse, status_code=201)
|
||||
async def register(body: RegisterRequest, session: SessionDep):
|
||||
existing = await UserCrud.first(
|
||||
session=session, filters=[User.username == body.username]
|
||||
)
|
||||
if existing:
|
||||
raise HTTPException(status_code=409, detail="Username already taken")
|
||||
|
||||
user = await UserCrud.create(
|
||||
session=session,
|
||||
obj=UserCreate(
|
||||
username=body.username,
|
||||
email=body.email,
|
||||
hashed_password=hash_password(body.password),
|
||||
),
|
||||
)
|
||||
return user
|
||||
|
||||
|
||||
@router.post("/token", status_code=204)
|
||||
async def login(
|
||||
session: SessionDep,
|
||||
response: Response,
|
||||
username: Annotated[str, Form()],
|
||||
password: Annotated[str, Form()],
|
||||
):
|
||||
user = await UserCrud.first(session=session, filters=[User.username == username])
|
||||
|
||||
if (
|
||||
not user
|
||||
or not user.hashed_password
|
||||
or not verify_password(password, user.hashed_password)
|
||||
):
|
||||
raise HTTPException(status_code=401, detail="Invalid credentials")
|
||||
|
||||
if not user.is_active:
|
||||
raise HTTPException(status_code=403, detail="Account disabled")
|
||||
|
||||
cookie_auth.set_cookie(response, str(user.id))
|
||||
|
||||
|
||||
@router.post("/logout", status_code=204)
|
||||
async def logout(response: Response):
|
||||
cookie_auth.delete_cookie(response)
|
||||
|
||||
|
||||
@router.get("/me", response_model=UserResponse)
|
||||
async def me(user: User = Security(auth)):
|
||||
return user
|
||||
|
||||
|
||||
@router.post("/tokens", response_model=ApiTokenResponse, status_code=201)
|
||||
async def create_token(
|
||||
body: ApiTokenCreateRequest,
|
||||
user: User = Security(auth),
|
||||
):
|
||||
raw, token_row = await create_api_token(
|
||||
user.id, name=body.name, expires_at=body.expires_at
|
||||
)
|
||||
return ApiTokenResponse(
|
||||
id=token_row.id,
|
||||
name=token_row.name,
|
||||
expires_at=token_row.expires_at,
|
||||
created_at=token_row.created_at,
|
||||
token=raw,
|
||||
)
|
||||
|
||||
|
||||
@router.delete("/tokens/{token_id}", status_code=204)
|
||||
async def revoke_token(
|
||||
session: SessionDep,
|
||||
token_id: UUID,
|
||||
user: User = Security(auth),
|
||||
):
|
||||
if not await UserTokenCrud.first(
|
||||
session=session,
|
||||
filters=[UserToken.id == token_id, UserToken.user_id == user.id],
|
||||
):
|
||||
raise HTTPException(status_code=404, detail="Token not found")
|
||||
await UserTokenCrud.delete(
|
||||
session=session,
|
||||
filters=[UserToken.id == token_id, UserToken.user_id == user.id],
|
||||
)
|
||||
@@ -1,64 +0,0 @@
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import EmailStr
|
||||
|
||||
from fastapi_toolsets.schemas import PydanticBase
|
||||
|
||||
|
||||
class RegisterRequest(PydanticBase):
|
||||
username: str
|
||||
password: str
|
||||
email: EmailStr | None = None
|
||||
|
||||
|
||||
class UserResponse(PydanticBase):
|
||||
id: UUID
|
||||
username: str
|
||||
email: str | None
|
||||
role: str
|
||||
is_active: bool
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
|
||||
|
||||
class ApiTokenCreateRequest(PydanticBase):
|
||||
name: str | None = None
|
||||
expires_at: datetime | None = None
|
||||
|
||||
|
||||
class ApiTokenResponse(PydanticBase):
|
||||
id: UUID
|
||||
name: str | None
|
||||
expires_at: datetime | None
|
||||
created_at: datetime
|
||||
# Only populated on creation
|
||||
token: str | None = None
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
|
||||
|
||||
class OAuthProviderResponse(PydanticBase):
|
||||
slug: str
|
||||
name: str
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
|
||||
|
||||
class UserCreate(PydanticBase):
|
||||
username: str
|
||||
email: str | None = None
|
||||
hashed_password: str | None = None
|
||||
|
||||
|
||||
class UserTokenCreate(PydanticBase):
|
||||
user_id: UUID
|
||||
token_hash: str
|
||||
name: str | None = None
|
||||
expires_at: datetime | None = None
|
||||
|
||||
|
||||
class OAuthAccountCreate(PydanticBase):
|
||||
user_id: UUID
|
||||
provider_id: UUID
|
||||
subject: str
|
||||
@@ -1,100 +0,0 @@
|
||||
import hashlib
|
||||
from datetime import datetime, timezone
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import HTTPException
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from fastapi_toolsets.exceptions import UnauthorizedError
|
||||
from fastapi_toolsets.security import (
|
||||
APIKeyHeaderAuth,
|
||||
BearerTokenAuth,
|
||||
CookieAuth,
|
||||
MultiAuth,
|
||||
)
|
||||
|
||||
from .crud import UserCrud, UserTokenCrud
|
||||
from .db import get_db_context
|
||||
from .models import User, UserRole, UserToken
|
||||
from .schemas import UserTokenCreate
|
||||
|
||||
SESSION_COOKIE = "session"
|
||||
SECRET_KEY = "123456789"
|
||||
|
||||
|
||||
def _hash_token(token: str) -> str:
|
||||
return hashlib.sha256(token.encode()).hexdigest()
|
||||
|
||||
|
||||
async def _verify_token(token: str, role: UserRole | None = None) -> User:
|
||||
async with get_db_context() as db:
|
||||
user_token = await UserTokenCrud.first(
|
||||
session=db,
|
||||
filters=[UserToken.token_hash == _hash_token(token)],
|
||||
load_options=[selectinload(UserToken.user)],
|
||||
)
|
||||
|
||||
if user_token is None or not user_token.user.is_active:
|
||||
raise UnauthorizedError()
|
||||
|
||||
if user_token.expires_at and user_token.expires_at < datetime.now(timezone.utc):
|
||||
raise UnauthorizedError()
|
||||
|
||||
user = user_token.user
|
||||
|
||||
if role is not None and user.role != role:
|
||||
raise HTTPException(status_code=403, detail="Insufficient permissions")
|
||||
|
||||
return user
|
||||
|
||||
|
||||
async def _verify_cookie(user_id: str, role: UserRole | None = None) -> User:
|
||||
async with get_db_context() as db:
|
||||
user = await UserCrud.first(
|
||||
session=db,
|
||||
filters=[User.id == UUID(user_id)],
|
||||
)
|
||||
|
||||
if not user or not user.is_active:
|
||||
raise UnauthorizedError()
|
||||
|
||||
if role is not None and user.role != role:
|
||||
raise HTTPException(status_code=403, detail="Insufficient permissions")
|
||||
|
||||
return user
|
||||
|
||||
|
||||
bearer_auth = BearerTokenAuth(
|
||||
validator=_verify_token,
|
||||
prefix="ctf_",
|
||||
)
|
||||
header_auth = APIKeyHeaderAuth(
|
||||
name="X-API-Key",
|
||||
validator=_verify_token,
|
||||
)
|
||||
cookie_auth = CookieAuth(
|
||||
name=SESSION_COOKIE,
|
||||
validator=_verify_cookie,
|
||||
secret_key=SECRET_KEY,
|
||||
)
|
||||
auth = MultiAuth(bearer_auth, header_auth, cookie_auth)
|
||||
|
||||
|
||||
async def create_api_token(
|
||||
user_id: UUID,
|
||||
*,
|
||||
name: str | None = None,
|
||||
expires_at: datetime | None = None,
|
||||
) -> tuple[str, UserToken]:
|
||||
raw = bearer_auth.generate_token()
|
||||
async with get_db_context() as db:
|
||||
token_row = await UserTokenCrud.create(
|
||||
session=db,
|
||||
obj=UserTokenCreate(
|
||||
user_id=user_id,
|
||||
token_hash=_hash_token(raw),
|
||||
name=name,
|
||||
expires_at=expires_at,
|
||||
),
|
||||
)
|
||||
return raw, token_row
|
||||
@@ -1,8 +1,7 @@
|
||||
from typing import Annotated
|
||||
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from fastapi_toolsets.crud import OrderByClause, PaginationType
|
||||
from fastapi_toolsets.schemas import (
|
||||
CursorPaginatedResponse,
|
||||
OffsetPaginatedResponse,
|
||||
@@ -20,22 +19,20 @@ router = APIRouter(prefix="/articles")
|
||||
@router.get("/offset")
|
||||
async def list_articles_offset(
|
||||
session: SessionDep,
|
||||
filter_by: Annotated[dict[str, list[str]], Depends(ArticleCrud.filter_params())],
|
||||
order_by: Annotated[
|
||||
OrderByClause | None,
|
||||
Depends(ArticleCrud.order_params(default_field=Article.created_at)),
|
||||
params: Annotated[
|
||||
dict,
|
||||
Depends(
|
||||
ArticleCrud.offset_paginate_params(
|
||||
default_page_size=20,
|
||||
max_page_size=100,
|
||||
default_order_field=Article.created_at,
|
||||
)
|
||||
),
|
||||
],
|
||||
page: int = Query(1, ge=1),
|
||||
items_per_page: int = Query(20, ge=1, le=100),
|
||||
search: str | None = None,
|
||||
) -> OffsetPaginatedResponse[ArticleRead]:
|
||||
return await ArticleCrud.offset_paginate(
|
||||
session=session,
|
||||
page=page,
|
||||
items_per_page=items_per_page,
|
||||
search=search,
|
||||
filter_by=filter_by or None,
|
||||
order_by=order_by,
|
||||
**params,
|
||||
schema=ArticleRead,
|
||||
)
|
||||
|
||||
@@ -43,22 +40,20 @@ async def list_articles_offset(
|
||||
@router.get("/cursor")
|
||||
async def list_articles_cursor(
|
||||
session: SessionDep,
|
||||
filter_by: Annotated[dict[str, list[str]], Depends(ArticleCrud.filter_params())],
|
||||
order_by: Annotated[
|
||||
OrderByClause | None,
|
||||
Depends(ArticleCrud.order_params(default_field=Article.created_at)),
|
||||
params: Annotated[
|
||||
dict,
|
||||
Depends(
|
||||
ArticleCrud.cursor_paginate_params(
|
||||
default_page_size=20,
|
||||
max_page_size=100,
|
||||
default_order_field=Article.created_at,
|
||||
)
|
||||
),
|
||||
],
|
||||
cursor: str | None = None,
|
||||
items_per_page: int = Query(20, ge=1, le=100),
|
||||
search: str | None = None,
|
||||
) -> CursorPaginatedResponse[ArticleRead]:
|
||||
return await ArticleCrud.cursor_paginate(
|
||||
session=session,
|
||||
cursor=cursor,
|
||||
items_per_page=items_per_page,
|
||||
search=search,
|
||||
filter_by=filter_by or None,
|
||||
order_by=order_by,
|
||||
**params,
|
||||
schema=ArticleRead,
|
||||
)
|
||||
|
||||
@@ -66,25 +61,19 @@ async def list_articles_cursor(
|
||||
@router.get("/")
|
||||
async def list_articles(
|
||||
session: SessionDep,
|
||||
filter_by: Annotated[dict[str, list[str]], Depends(ArticleCrud.filter_params())],
|
||||
order_by: Annotated[
|
||||
OrderByClause | None,
|
||||
Depends(ArticleCrud.order_params(default_field=Article.created_at)),
|
||||
params: Annotated[
|
||||
dict,
|
||||
Depends(
|
||||
ArticleCrud.paginate_params(
|
||||
default_page_size=20,
|
||||
max_page_size=100,
|
||||
default_order_field=Article.created_at,
|
||||
)
|
||||
),
|
||||
],
|
||||
pagination_type: PaginationType = PaginationType.OFFSET,
|
||||
page: int = Query(1, ge=1),
|
||||
cursor: str | None = None,
|
||||
items_per_page: int = Query(20, ge=1, le=100),
|
||||
search: str | None = None,
|
||||
) -> PaginatedResponse[ArticleRead]:
|
||||
return await ArticleCrud.paginate(
|
||||
session,
|
||||
pagination_type=pagination_type,
|
||||
page=page,
|
||||
cursor=cursor,
|
||||
items_per_page=items_per_page,
|
||||
search=search,
|
||||
filter_by=filter_by or None,
|
||||
order_by=order_by,
|
||||
**params,
|
||||
schema=ArticleRead,
|
||||
)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[project]
|
||||
name = "fastapi-toolsets"
|
||||
version = "2.3.0"
|
||||
version = "3.0.3"
|
||||
description = "Production-ready utilities for FastAPI applications"
|
||||
readme = "README.md"
|
||||
license = "MIT"
|
||||
@@ -66,8 +66,8 @@ manager = "fastapi_toolsets.cli.app:cli"
|
||||
dev = [
|
||||
{include-group = "tests"},
|
||||
{include-group = "docs"},
|
||||
{include-group = "docs-src"},
|
||||
"fastapi-toolsets[all]",
|
||||
"prek>=0.3.8",
|
||||
"ruff>=0.1.0",
|
||||
"ty>=0.0.1a0",
|
||||
]
|
||||
@@ -80,15 +80,13 @@ tests = [
|
||||
"pytest>=8.0.0",
|
||||
]
|
||||
docs = [
|
||||
"mike",
|
||||
"mkdocstrings-python>=2.0.2",
|
||||
"zensical>=0.0.23",
|
||||
]
|
||||
docs-src = [
|
||||
"bcrypt>=4.0.0",
|
||||
"zensical>=0.0.30",
|
||||
]
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.10,<0.11.0"]
|
||||
requires = ["uv_build>=0.10,<0.12.0"]
|
||||
build-backend = "uv_build"
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
@@ -107,3 +105,6 @@ exclude_lines = [
|
||||
"if TYPE_CHECKING:",
|
||||
"raise NotImplementedError",
|
||||
]
|
||||
|
||||
[tool.uv.sources]
|
||||
mike = { git = "https://github.com/squidfunk/mike.git", tag = "2.2.0+zensical-0.1.0" }
|
||||
|
||||
@@ -21,4 +21,4 @@ Example usage:
|
||||
return Response(data={"user": user.username}, message="Success")
|
||||
"""
|
||||
|
||||
__version__ = "2.3.0"
|
||||
__version__ = "3.0.3"
|
||||
|
||||
@@ -1,12 +1,18 @@
|
||||
"""Generic async CRUD operations for SQLAlchemy models."""
|
||||
|
||||
from ..exceptions import InvalidFacetFilterError, NoSearchableFieldsError
|
||||
from ..exceptions import (
|
||||
InvalidFacetFilterError,
|
||||
InvalidSearchColumnError,
|
||||
NoSearchableFieldsError,
|
||||
UnsupportedFacetTypeError,
|
||||
)
|
||||
from ..schemas import PaginationType
|
||||
from ..types import (
|
||||
FacetFieldType,
|
||||
JoinType,
|
||||
M2MFieldType,
|
||||
OrderByClause,
|
||||
OrderFieldType,
|
||||
SearchFieldType,
|
||||
)
|
||||
from .factory import AsyncCrud, CrudFactory
|
||||
@@ -18,11 +24,14 @@ __all__ = [
|
||||
"FacetFieldType",
|
||||
"get_searchable_fields",
|
||||
"InvalidFacetFilterError",
|
||||
"InvalidSearchColumnError",
|
||||
"JoinType",
|
||||
"M2MFieldType",
|
||||
"NoSearchableFieldsError",
|
||||
"OrderByClause",
|
||||
"OrderFieldType",
|
||||
"PaginationType",
|
||||
"SearchConfig",
|
||||
"SearchFieldType",
|
||||
"UnsupportedFacetTypeError",
|
||||
]
|
||||
|
||||
@@ -38,6 +38,7 @@ from ..types import (
|
||||
M2MFieldType,
|
||||
ModelType,
|
||||
OrderByClause,
|
||||
OrderFieldType,
|
||||
SchemaType,
|
||||
SearchFieldType,
|
||||
)
|
||||
@@ -47,6 +48,7 @@ from .search import (
|
||||
build_filter_by,
|
||||
build_search_filters,
|
||||
facet_keys,
|
||||
search_field_keys,
|
||||
)
|
||||
|
||||
|
||||
@@ -58,18 +60,33 @@ class _CursorDirection(str, Enum):
|
||||
def _encode_cursor(
|
||||
value: Any, *, direction: _CursorDirection = _CursorDirection.NEXT
|
||||
) -> str:
|
||||
"""Encode a cursor column value and navigation direction as a base64 string."""
|
||||
return base64.b64encode(
|
||||
json.dumps({"val": str(value), "dir": direction}).encode()
|
||||
).decode()
|
||||
"""Encode a cursor column value and navigation direction as a URL-safe base64 string."""
|
||||
return (
|
||||
base64.urlsafe_b64encode(
|
||||
json.dumps({"val": str(value), "dir": direction}).encode()
|
||||
)
|
||||
.decode()
|
||||
.rstrip("=")
|
||||
)
|
||||
|
||||
|
||||
def _decode_cursor(cursor: str) -> tuple[str, _CursorDirection]:
|
||||
"""Decode a cursor base64 string into ``(raw_value, direction)``."""
|
||||
payload = json.loads(base64.b64decode(cursor.encode()).decode())
|
||||
"""Decode a URL-safe base64 cursor string into ``(raw_value, direction)``."""
|
||||
padded = cursor + "=" * (-len(cursor) % 4)
|
||||
payload = json.loads(base64.urlsafe_b64decode(padded).decode())
|
||||
return payload["val"], _CursorDirection(payload["dir"])
|
||||
|
||||
|
||||
def _page_size_query(default: int, max_size: int) -> int:
|
||||
"""Return a FastAPI ``Query`` for the ``items_per_page`` parameter."""
|
||||
return Query(
|
||||
default,
|
||||
ge=1,
|
||||
le=max_size,
|
||||
description=f"Number of items per page (max {max_size})",
|
||||
)
|
||||
|
||||
|
||||
def _parse_cursor_value(raw_val: str, col_type: Any) -> Any:
|
||||
"""Parse a raw cursor string value back into the appropriate Python type."""
|
||||
if isinstance(col_type, Integer):
|
||||
@@ -100,8 +117,12 @@ def _apply_joins(q: Any, joins: JoinType | None, outer_join: bool) -> Any:
|
||||
|
||||
def _apply_search_joins(q: Any, search_joins: list[Any]) -> Any:
|
||||
"""Apply relationship-based outer joins (from search/filter_by) to a query."""
|
||||
seen: set[str] = set()
|
||||
for join_rel in search_joins:
|
||||
q = q.outerjoin(join_rel)
|
||||
key = str(join_rel)
|
||||
if key not in seen:
|
||||
seen.add(key)
|
||||
q = q.outerjoin(join_rel)
|
||||
return q
|
||||
|
||||
|
||||
@@ -114,7 +135,7 @@ class AsyncCrud(Generic[ModelType]):
|
||||
model: ClassVar[type[DeclarativeBase]]
|
||||
searchable_fields: ClassVar[Sequence[SearchFieldType] | None] = None
|
||||
facet_fields: ClassVar[Sequence[FacetFieldType] | None] = None
|
||||
order_fields: ClassVar[Sequence[QueryableAttribute[Any]] | None] = None
|
||||
order_fields: ClassVar[Sequence[OrderFieldType] | None] = None
|
||||
m2m_fields: ClassVar[M2MFieldType | None] = None
|
||||
default_load_options: ClassVar[Sequence[ExecutableOption] | None] = None
|
||||
cursor_column: ClassVar[Any | None] = None
|
||||
@@ -149,6 +170,18 @@ class AsyncCrud(Generic[ModelType]):
|
||||
return load_options
|
||||
return cls.default_load_options
|
||||
|
||||
@classmethod
|
||||
async def _reload_with_options(
|
||||
cls: type[Self], session: AsyncSession, instance: ModelType
|
||||
) -> ModelType:
|
||||
"""Re-query instance by PK with default_load_options applied."""
|
||||
mapper = cls.model.__mapper__
|
||||
pk_filters = [
|
||||
getattr(cls.model, col.key) == getattr(instance, col.key)
|
||||
for col in mapper.primary_key
|
||||
]
|
||||
return await cls.get(session, filters=pk_filters)
|
||||
|
||||
@classmethod
|
||||
async def _resolve_m2m(
|
||||
cls: type[Self],
|
||||
@@ -248,107 +281,388 @@ class AsyncCrud(Generic[ModelType]):
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def filter_params(
|
||||
def _resolve_search_columns(
|
||||
cls: type[Self],
|
||||
search_fields: Sequence[SearchFieldType] | None,
|
||||
) -> list[str] | None:
|
||||
"""Return search column keys, or None if no searchable fields configured."""
|
||||
fields = search_fields if search_fields is not None else cls.searchable_fields
|
||||
if not fields:
|
||||
return None
|
||||
return search_field_keys(fields)
|
||||
|
||||
@classmethod
|
||||
def _resolve_order_columns(
|
||||
cls: type[Self],
|
||||
order_fields: Sequence[OrderFieldType] | None,
|
||||
) -> list[str] | None:
|
||||
"""Return sort column keys, or None if no order fields configured."""
|
||||
fields = order_fields if order_fields is not None else cls.order_fields
|
||||
if not fields:
|
||||
return None
|
||||
return sorted(facet_keys(fields))
|
||||
|
||||
@classmethod
|
||||
def _build_paginate_params(
|
||||
cls: type[Self],
|
||||
*,
|
||||
facet_fields: Sequence[FacetFieldType] | None = None,
|
||||
) -> Callable[..., Awaitable[dict[str, list[str]]]]:
|
||||
"""Return a FastAPI dependency that collects facet filter values from query parameters.
|
||||
Args:
|
||||
facet_fields: Override the facet fields for this dependency. Falls back to the
|
||||
class-level ``facet_fields`` if not provided.
|
||||
pagination_params: list[inspect.Parameter],
|
||||
pagination_fixed: dict[str, Any],
|
||||
dep_name: str,
|
||||
search: bool,
|
||||
filter: bool,
|
||||
order: bool,
|
||||
search_fields: Sequence[SearchFieldType] | None,
|
||||
facet_fields: Sequence[FacetFieldType] | None,
|
||||
order_fields: Sequence[OrderFieldType] | None,
|
||||
default_order_field: QueryableAttribute[Any] | None,
|
||||
default_order: Literal["asc", "desc"],
|
||||
) -> Callable[..., Awaitable[dict[str, Any]]]:
|
||||
"""Build a consolidated FastAPI dependency that merges pagination, search, filter, and order params."""
|
||||
all_params: list[inspect.Parameter] = list(pagination_params)
|
||||
pagination_param_names = tuple(p.name for p in pagination_params)
|
||||
reserved_names: set[str] = set(pagination_param_names)
|
||||
|
||||
Returns:
|
||||
An async dependency function named ``{Model}FilterParams`` that resolves to a
|
||||
``dict[str, list[str]]`` containing only the keys that were supplied in the
|
||||
request (absent/``None`` parameters are excluded).
|
||||
|
||||
Raises:
|
||||
ValueError: If no facet fields are configured on this CRUD class and none are
|
||||
provided via ``facet_fields``.
|
||||
"""
|
||||
fields = cls._resolve_facet_fields(facet_fields)
|
||||
if not fields:
|
||||
raise ValueError(
|
||||
f"{cls.__name__} has no facet_fields configured. "
|
||||
"Pass facet_fields= or set them on CrudFactory."
|
||||
)
|
||||
keys = facet_keys(fields)
|
||||
|
||||
async def dependency(**kwargs: Any) -> dict[str, list[str]]:
|
||||
return {k: v for k, v in kwargs.items() if v is not None}
|
||||
|
||||
dependency.__name__ = f"{cls.model.__name__}FilterParams"
|
||||
dependency.__signature__ = inspect.Signature( # type: ignore[attr-defined]
|
||||
parameters=[
|
||||
inspect.Parameter(
|
||||
k,
|
||||
inspect.Parameter.KEYWORD_ONLY,
|
||||
annotation=list[str] | None,
|
||||
default=Query(default=None),
|
||||
search_keys: list[str] | None = None
|
||||
if search:
|
||||
search_keys = cls._resolve_search_columns(search_fields)
|
||||
if search_keys:
|
||||
all_params.extend(
|
||||
[
|
||||
inspect.Parameter(
|
||||
"search",
|
||||
inspect.Parameter.KEYWORD_ONLY,
|
||||
annotation=str | None,
|
||||
default=Query(
|
||||
default=None, description="Search query string"
|
||||
),
|
||||
),
|
||||
inspect.Parameter(
|
||||
"search_column",
|
||||
inspect.Parameter.KEYWORD_ONLY,
|
||||
annotation=str | None,
|
||||
default=Query(
|
||||
default=None,
|
||||
description="Restrict search to a single column",
|
||||
enum=search_keys,
|
||||
),
|
||||
),
|
||||
]
|
||||
)
|
||||
for k in keys
|
||||
]
|
||||
)
|
||||
reserved_names.update({"search", "search_column"})
|
||||
|
||||
filter_keys: list[str] | None = None
|
||||
if filter:
|
||||
resolved_facets = cls._resolve_facet_fields(facet_fields)
|
||||
if resolved_facets:
|
||||
filter_keys = facet_keys(resolved_facets)
|
||||
for k in filter_keys:
|
||||
if k in reserved_names:
|
||||
raise ValueError(
|
||||
f"Facet field key {k!r} conflicts with a reserved "
|
||||
f"parameter name. Reserved names: {sorted(reserved_names)}"
|
||||
)
|
||||
all_params.extend(
|
||||
inspect.Parameter(
|
||||
k,
|
||||
inspect.Parameter.KEYWORD_ONLY,
|
||||
annotation=list[str] | None,
|
||||
default=Query(default=None),
|
||||
)
|
||||
for k in filter_keys
|
||||
)
|
||||
reserved_names.update(filter_keys)
|
||||
|
||||
order_field_map: dict[str, OrderFieldType] | None = None
|
||||
order_valid_keys: list[str] | None = None
|
||||
if order:
|
||||
resolved_order = (
|
||||
order_fields if order_fields is not None else cls.order_fields
|
||||
)
|
||||
if resolved_order:
|
||||
keys = facet_keys(resolved_order)
|
||||
order_field_map = dict(zip(keys, resolved_order))
|
||||
order_valid_keys = sorted(order_field_map.keys())
|
||||
all_params.extend(
|
||||
[
|
||||
inspect.Parameter(
|
||||
"order_by",
|
||||
inspect.Parameter.KEYWORD_ONLY,
|
||||
annotation=str | None,
|
||||
default=Query(
|
||||
None,
|
||||
description=f"Field to order by. Valid values: {order_valid_keys}",
|
||||
enum=order_valid_keys,
|
||||
),
|
||||
),
|
||||
inspect.Parameter(
|
||||
"order",
|
||||
inspect.Parameter.KEYWORD_ONLY,
|
||||
annotation=Literal["asc", "desc"],
|
||||
default=Query(default_order, description="Sort direction"),
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
async def dependency(**kwargs: Any) -> dict[str, Any]:
|
||||
result: dict[str, Any] = dict(pagination_fixed)
|
||||
for name in pagination_param_names:
|
||||
result[name] = kwargs[name]
|
||||
|
||||
if search_keys is not None:
|
||||
search_val = kwargs.get("search")
|
||||
if search_val is not None:
|
||||
result["search"] = search_val
|
||||
search_col_val = kwargs.get("search_column")
|
||||
if search_col_val is not None:
|
||||
result["search_column"] = search_col_val
|
||||
|
||||
if filter_keys is not None:
|
||||
filter_by = {
|
||||
k: kwargs[k] for k in filter_keys if kwargs.get(k) is not None
|
||||
}
|
||||
result["filter_by"] = filter_by or None
|
||||
|
||||
if order_field_map is not None:
|
||||
order_by_val = kwargs.get("order_by")
|
||||
order_dir = kwargs.get("order", default_order)
|
||||
if order_by_val is None:
|
||||
field = default_order_field
|
||||
elif order_by_val not in order_field_map:
|
||||
raise InvalidOrderFieldError(order_by_val, order_valid_keys or [])
|
||||
else:
|
||||
field = order_field_map[order_by_val]
|
||||
if field is not None:
|
||||
if isinstance(field, tuple):
|
||||
col = field[-1]
|
||||
result["order_by"] = (
|
||||
col.asc() if order_dir == "asc" else col.desc()
|
||||
)
|
||||
result["order_joins"] = list(field[:-1])
|
||||
else:
|
||||
result["order_by"] = (
|
||||
field.asc() if order_dir == "asc" else field.desc()
|
||||
)
|
||||
else:
|
||||
result["order_by"] = None
|
||||
|
||||
return result
|
||||
|
||||
dependency.__name__ = dep_name
|
||||
dependency.__signature__ = inspect.Signature( # type: ignore[attr-defined] # ty:ignore[unresolved-attribute]
|
||||
parameters=all_params,
|
||||
)
|
||||
return dependency
|
||||
|
||||
@classmethod
|
||||
def order_params(
|
||||
def offset_paginate_params(
|
||||
cls: type[Self],
|
||||
*,
|
||||
order_fields: Sequence[QueryableAttribute[Any]] | None = None,
|
||||
default_field: QueryableAttribute[Any] | None = None,
|
||||
default_page_size: int = 20,
|
||||
max_page_size: int = 100,
|
||||
include_total: bool = True,
|
||||
search: bool = True,
|
||||
filter: bool = True,
|
||||
order: bool = True,
|
||||
search_fields: Sequence[SearchFieldType] | None = None,
|
||||
facet_fields: Sequence[FacetFieldType] | None = None,
|
||||
order_fields: Sequence[OrderFieldType] | None = None,
|
||||
default_order_field: QueryableAttribute[Any] | None = None,
|
||||
default_order: Literal["asc", "desc"] = "asc",
|
||||
) -> Callable[..., Awaitable[OrderByClause | None]]:
|
||||
"""Return a FastAPI dependency that resolves order query params into an order_by clause.
|
||||
) -> Callable[..., Awaitable[dict[str, Any]]]:
|
||||
"""Return a FastAPI dependency that collects all params for :meth:`offset_paginate`.
|
||||
|
||||
Args:
|
||||
order_fields: Override the allowed order fields. Falls back to the class-level
|
||||
``order_fields`` if not provided.
|
||||
default_field: Field to order by when ``order_by`` query param is absent.
|
||||
If ``None`` and no ``order_by`` is provided, no ordering is applied.
|
||||
default_order: Default order direction when ``order`` is absent
|
||||
(``"asc"`` or ``"desc"``).
|
||||
default_page_size: Default ``items_per_page`` value.
|
||||
max_page_size: Maximum ``items_per_page`` value.
|
||||
include_total: Whether to include total count (not a query param).
|
||||
search: Enable search query parameters.
|
||||
filter: Enable facet filter query parameters.
|
||||
order: Enable order query parameters.
|
||||
search_fields: Override searchable fields.
|
||||
facet_fields: Override facet fields.
|
||||
order_fields: Override order fields.
|
||||
default_order_field: Default field to order by when ``order_by`` is absent.
|
||||
default_order: Default sort direction.
|
||||
|
||||
Returns:
|
||||
An async dependency function named ``{Model}OrderParams`` that resolves to an
|
||||
``OrderByClause`` (or ``None``). Pass it to ``Depends()`` in your route.
|
||||
|
||||
Raises:
|
||||
ValueError: If no order fields are configured on this CRUD class and none are
|
||||
provided via ``order_fields``.
|
||||
InvalidOrderFieldError: When the request provides an unknown ``order_by`` value.
|
||||
An async dependency that resolves to a dict ready to be unpacked
|
||||
into :meth:`offset_paginate`.
|
||||
"""
|
||||
fields = order_fields if order_fields is not None else cls.order_fields
|
||||
if not fields:
|
||||
raise ValueError(
|
||||
f"{cls.__name__} has no order_fields configured. "
|
||||
"Pass order_fields= or set them on CrudFactory."
|
||||
)
|
||||
field_map: dict[str, QueryableAttribute[Any]] = {f.key: f for f in fields}
|
||||
valid_keys = sorted(field_map.keys())
|
||||
|
||||
async def dependency(
|
||||
order_by: str | None = Query(
|
||||
None, description=f"Field to order by. Valid values: {valid_keys}"
|
||||
pagination_params = [
|
||||
inspect.Parameter(
|
||||
"page",
|
||||
inspect.Parameter.KEYWORD_ONLY,
|
||||
annotation=int,
|
||||
default=Query(1, ge=1, description="Page number (1-indexed)"),
|
||||
),
|
||||
order: Literal["asc", "desc"] = Query(
|
||||
default_order, description="Sort direction"
|
||||
inspect.Parameter(
|
||||
"items_per_page",
|
||||
inspect.Parameter.KEYWORD_ONLY,
|
||||
annotation=int,
|
||||
default=_page_size_query(default_page_size, max_page_size),
|
||||
),
|
||||
) -> OrderByClause | None:
|
||||
if order_by is None:
|
||||
if default_field is None:
|
||||
return None
|
||||
field = default_field
|
||||
elif order_by not in field_map:
|
||||
raise InvalidOrderFieldError(order_by, valid_keys)
|
||||
else:
|
||||
field = field_map[order_by]
|
||||
return field.asc() if order == "asc" else field.desc()
|
||||
]
|
||||
return cls._build_paginate_params(
|
||||
pagination_params=pagination_params,
|
||||
pagination_fixed={"include_total": include_total},
|
||||
dep_name=f"{cls.model.__name__}OffsetPaginateParams",
|
||||
search=search,
|
||||
filter=filter,
|
||||
order=order,
|
||||
search_fields=search_fields,
|
||||
facet_fields=facet_fields,
|
||||
order_fields=order_fields,
|
||||
default_order_field=default_order_field,
|
||||
default_order=default_order,
|
||||
)
|
||||
|
||||
dependency.__name__ = f"{cls.model.__name__}OrderParams"
|
||||
return dependency
|
||||
@classmethod
|
||||
def cursor_paginate_params(
|
||||
cls: type[Self],
|
||||
*,
|
||||
default_page_size: int = 20,
|
||||
max_page_size: int = 100,
|
||||
search: bool = True,
|
||||
filter: bool = True,
|
||||
order: bool = True,
|
||||
search_fields: Sequence[SearchFieldType] | None = None,
|
||||
facet_fields: Sequence[FacetFieldType] | None = None,
|
||||
order_fields: Sequence[OrderFieldType] | None = None,
|
||||
default_order_field: QueryableAttribute[Any] | None = None,
|
||||
default_order: Literal["asc", "desc"] = "asc",
|
||||
) -> Callable[..., Awaitable[dict[str, Any]]]:
|
||||
"""Return a FastAPI dependency that collects all params for :meth:`cursor_paginate`.
|
||||
|
||||
Args:
|
||||
default_page_size: Default ``items_per_page`` value.
|
||||
max_page_size: Maximum ``items_per_page`` value.
|
||||
search: Enable search query parameters.
|
||||
filter: Enable facet filter query parameters.
|
||||
order: Enable order query parameters.
|
||||
search_fields: Override searchable fields.
|
||||
facet_fields: Override facet fields.
|
||||
order_fields: Override order fields.
|
||||
default_order_field: Default field to order by when ``order_by`` is absent.
|
||||
default_order: Default sort direction.
|
||||
|
||||
Returns:
|
||||
An async dependency that resolves to a dict ready to be unpacked
|
||||
into :meth:`cursor_paginate`.
|
||||
"""
|
||||
pagination_params = [
|
||||
inspect.Parameter(
|
||||
"cursor",
|
||||
inspect.Parameter.KEYWORD_ONLY,
|
||||
annotation=str | None,
|
||||
default=Query(
|
||||
None, description="Cursor token from a previous response"
|
||||
),
|
||||
),
|
||||
inspect.Parameter(
|
||||
"items_per_page",
|
||||
inspect.Parameter.KEYWORD_ONLY,
|
||||
annotation=int,
|
||||
default=_page_size_query(default_page_size, max_page_size),
|
||||
),
|
||||
]
|
||||
return cls._build_paginate_params(
|
||||
pagination_params=pagination_params,
|
||||
pagination_fixed={},
|
||||
dep_name=f"{cls.model.__name__}CursorPaginateParams",
|
||||
search=search,
|
||||
filter=filter,
|
||||
order=order,
|
||||
search_fields=search_fields,
|
||||
facet_fields=facet_fields,
|
||||
order_fields=order_fields,
|
||||
default_order_field=default_order_field,
|
||||
default_order=default_order,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def paginate_params(
|
||||
cls: type[Self],
|
||||
*,
|
||||
default_page_size: int = 20,
|
||||
max_page_size: int = 100,
|
||||
default_pagination_type: PaginationType = PaginationType.OFFSET,
|
||||
include_total: bool = True,
|
||||
search: bool = True,
|
||||
filter: bool = True,
|
||||
order: bool = True,
|
||||
search_fields: Sequence[SearchFieldType] | None = None,
|
||||
facet_fields: Sequence[FacetFieldType] | None = None,
|
||||
order_fields: Sequence[OrderFieldType] | None = None,
|
||||
default_order_field: QueryableAttribute[Any] | None = None,
|
||||
default_order: Literal["asc", "desc"] = "asc",
|
||||
) -> Callable[..., Awaitable[dict[str, Any]]]:
|
||||
"""Return a FastAPI dependency that collects all params for :meth:`paginate`.
|
||||
|
||||
Args:
|
||||
default_page_size: Default ``items_per_page`` value.
|
||||
max_page_size: Maximum ``items_per_page`` value.
|
||||
default_pagination_type: Default pagination strategy.
|
||||
include_total: Whether to include total count (not a query param).
|
||||
search: Enable search query parameters.
|
||||
filter: Enable facet filter query parameters.
|
||||
order: Enable order query parameters.
|
||||
search_fields: Override searchable fields.
|
||||
facet_fields: Override facet fields.
|
||||
order_fields: Override order fields.
|
||||
default_order_field: Default field to order by when ``order_by`` is absent.
|
||||
default_order: Default sort direction.
|
||||
|
||||
Returns:
|
||||
An async dependency that resolves to a dict ready to be unpacked
|
||||
into :meth:`paginate`.
|
||||
"""
|
||||
pagination_params = [
|
||||
inspect.Parameter(
|
||||
"pagination_type",
|
||||
inspect.Parameter.KEYWORD_ONLY,
|
||||
annotation=PaginationType,
|
||||
default=Query(
|
||||
default_pagination_type, description="Pagination strategy"
|
||||
),
|
||||
),
|
||||
inspect.Parameter(
|
||||
"page",
|
||||
inspect.Parameter.KEYWORD_ONLY,
|
||||
annotation=int,
|
||||
default=Query(
|
||||
1, ge=1, description="Page number (1-indexed, offset only)"
|
||||
),
|
||||
),
|
||||
inspect.Parameter(
|
||||
"cursor",
|
||||
inspect.Parameter.KEYWORD_ONLY,
|
||||
annotation=str | None,
|
||||
default=Query(
|
||||
None,
|
||||
description="Cursor token from a previous response (cursor only)",
|
||||
),
|
||||
),
|
||||
inspect.Parameter(
|
||||
"items_per_page",
|
||||
inspect.Parameter.KEYWORD_ONLY,
|
||||
annotation=int,
|
||||
default=_page_size_query(default_page_size, max_page_size),
|
||||
),
|
||||
]
|
||||
return cls._build_paginate_params(
|
||||
pagination_params=pagination_params,
|
||||
pagination_fixed={"include_total": include_total},
|
||||
dep_name=f"{cls.model.__name__}PaginateParams",
|
||||
search=search,
|
||||
filter=filter,
|
||||
order=order,
|
||||
search_fields=search_fields,
|
||||
facet_fields=facet_fields,
|
||||
order_fields=order_fields,
|
||||
default_order_field=default_order_field,
|
||||
default_order=default_order,
|
||||
)
|
||||
|
||||
@overload
|
||||
@classmethod
|
||||
@@ -403,6 +717,8 @@ class AsyncCrud(Generic[ModelType]):
|
||||
|
||||
session.add(db_model)
|
||||
await session.refresh(db_model)
|
||||
if cls.default_load_options:
|
||||
db_model = await cls._reload_with_options(session, db_model)
|
||||
result = cast(ModelType, db_model)
|
||||
if schema:
|
||||
return Response(data=schema.model_validate(result))
|
||||
@@ -758,6 +1074,8 @@ class AsyncCrud(Generic[ModelType]):
|
||||
for rel_attr, related_instances in m2m_resolved.items():
|
||||
setattr(db_model, rel_attr, related_instances)
|
||||
await session.refresh(db_model)
|
||||
if cls.default_load_options:
|
||||
db_model = await cls._reload_with_options(session, db_model)
|
||||
if schema:
|
||||
return Response(data=schema.model_validate(db_model))
|
||||
return db_model
|
||||
@@ -920,10 +1238,14 @@ class AsyncCrud(Generic[ModelType]):
|
||||
outer_join: bool = False,
|
||||
load_options: Sequence[ExecutableOption] | None = None,
|
||||
order_by: OrderByClause | None = None,
|
||||
order_joins: list[Any] | None = None,
|
||||
page: int = 1,
|
||||
items_per_page: int = 20,
|
||||
include_total: bool = True,
|
||||
search: str | SearchConfig | None = None,
|
||||
search_fields: Sequence[SearchFieldType] | None = None,
|
||||
search_column: str | None = None,
|
||||
order_fields: Sequence[OrderFieldType] | None = None,
|
||||
facet_fields: Sequence[FacetFieldType] | None = None,
|
||||
filter_by: dict[str, Any] | BaseModel | None = None,
|
||||
schema: type[BaseModel],
|
||||
@@ -939,8 +1261,12 @@ class AsyncCrud(Generic[ModelType]):
|
||||
order_by: Column or list of columns to order by
|
||||
page: Page number (1-indexed)
|
||||
items_per_page: Number of items per page
|
||||
include_total: When ``False``, skip the ``COUNT`` query;
|
||||
``pagination.total_count`` will be ``None``.
|
||||
search: Search query string or SearchConfig object
|
||||
search_fields: Fields to search in (overrides class default)
|
||||
search_column: Restrict search to a single column key.
|
||||
order_fields: Fields allowed for sorting (overrides class default).
|
||||
facet_fields: Columns to compute distinct values for (overrides class default)
|
||||
filter_by: Dict of {column_key: value} to filter by declared facet fields.
|
||||
Keys must match the column.key of a facet field. Scalar → equality,
|
||||
@@ -963,6 +1289,7 @@ class AsyncCrud(Generic[ModelType]):
|
||||
search,
|
||||
search_fields=search_fields,
|
||||
default_fields=cls.searchable_fields,
|
||||
search_column=search_column,
|
||||
)
|
||||
filters.extend(search_filters)
|
||||
search_joins.extend(new_search_joins)
|
||||
@@ -976,6 +1303,10 @@ class AsyncCrud(Generic[ModelType]):
|
||||
# Apply search joins (always outer joins for search)
|
||||
q = _apply_search_joins(q, search_joins)
|
||||
|
||||
# Apply order joins (relation joins required for order_by field)
|
||||
if order_joins:
|
||||
q = _apply_search_joins(q, order_joins)
|
||||
|
||||
if filters:
|
||||
q = q.where(and_(*filters))
|
||||
if resolved := cls._resolve_load_options(load_options):
|
||||
@@ -983,31 +1314,44 @@ class AsyncCrud(Generic[ModelType]):
|
||||
if order_by is not None:
|
||||
q = q.order_by(order_by)
|
||||
|
||||
q = q.offset(offset).limit(items_per_page)
|
||||
result = await session.execute(q)
|
||||
raw_items = cast(list[ModelType], result.unique().scalars().all())
|
||||
if include_total:
|
||||
q = q.offset(offset).limit(items_per_page)
|
||||
result = await session.execute(q)
|
||||
raw_items = cast(list[ModelType], result.unique().scalars().all())
|
||||
|
||||
# Count query (with same joins and filters)
|
||||
pk_col = cls.model.__mapper__.primary_key[0]
|
||||
count_q = select(func.count(func.distinct(getattr(cls.model, pk_col.name))))
|
||||
count_q = count_q.select_from(cls.model)
|
||||
|
||||
# Apply explicit joins to count query
|
||||
count_q = _apply_joins(count_q, joins, outer_join)
|
||||
|
||||
# Apply search joins to count query
|
||||
count_q = _apply_search_joins(count_q, search_joins)
|
||||
|
||||
if filters:
|
||||
count_q = count_q.where(and_(*filters))
|
||||
|
||||
count_result = await session.execute(count_q)
|
||||
total_count: int | None = count_result.scalar_one()
|
||||
has_more = page * items_per_page < total_count
|
||||
else:
|
||||
# Fetch one extra row to detect if a next page exists without COUNT
|
||||
q = q.offset(offset).limit(items_per_page + 1)
|
||||
result = await session.execute(q)
|
||||
raw_items = cast(list[ModelType], result.unique().scalars().all())
|
||||
has_more = len(raw_items) > items_per_page
|
||||
raw_items = raw_items[:items_per_page]
|
||||
total_count = None
|
||||
|
||||
items: list[Any] = [schema.model_validate(item) for item in raw_items]
|
||||
|
||||
# Count query (with same joins and filters)
|
||||
pk_col = cls.model.__mapper__.primary_key[0]
|
||||
count_q = select(func.count(func.distinct(getattr(cls.model, pk_col.name))))
|
||||
count_q = count_q.select_from(cls.model)
|
||||
|
||||
# Apply explicit joins to count query
|
||||
count_q = _apply_joins(count_q, joins, outer_join)
|
||||
|
||||
# Apply search joins to count query
|
||||
count_q = _apply_search_joins(count_q, search_joins)
|
||||
|
||||
if filters:
|
||||
count_q = count_q.where(and_(*filters))
|
||||
|
||||
count_result = await session.execute(count_q)
|
||||
total_count = count_result.scalar_one()
|
||||
|
||||
filter_attributes = await cls._build_filter_attributes(
|
||||
session, facet_fields, filters, search_joins
|
||||
)
|
||||
search_columns = cls._resolve_search_columns(search_fields)
|
||||
order_columns = cls._resolve_order_columns(order_fields)
|
||||
|
||||
return OffsetPaginatedResponse(
|
||||
data=items,
|
||||
@@ -1015,9 +1359,11 @@ class AsyncCrud(Generic[ModelType]):
|
||||
total_count=total_count,
|
||||
items_per_page=items_per_page,
|
||||
page=page,
|
||||
has_more=page * items_per_page < total_count,
|
||||
has_more=has_more,
|
||||
),
|
||||
filter_attributes=filter_attributes,
|
||||
search_columns=search_columns,
|
||||
order_columns=order_columns,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@@ -1031,9 +1377,12 @@ class AsyncCrud(Generic[ModelType]):
|
||||
outer_join: bool = False,
|
||||
load_options: Sequence[ExecutableOption] | None = None,
|
||||
order_by: OrderByClause | None = None,
|
||||
order_joins: list[Any] | None = None,
|
||||
items_per_page: int = 20,
|
||||
search: str | SearchConfig | None = None,
|
||||
search_fields: Sequence[SearchFieldType] | None = None,
|
||||
search_column: str | None = None,
|
||||
order_fields: Sequence[OrderFieldType] | None = None,
|
||||
facet_fields: Sequence[FacetFieldType] | None = None,
|
||||
filter_by: dict[str, Any] | BaseModel | None = None,
|
||||
schema: type[BaseModel],
|
||||
@@ -1054,6 +1403,8 @@ class AsyncCrud(Generic[ModelType]):
|
||||
items_per_page: Number of items per page (default 20).
|
||||
search: Search query string or SearchConfig object.
|
||||
search_fields: Fields to search in (overrides class default).
|
||||
search_column: Restrict search to a single column key.
|
||||
order_fields: Fields allowed for sorting (overrides class default).
|
||||
facet_fields: Columns to compute distinct values for (overrides class default).
|
||||
filter_by: Dict of {column_key: value} to filter by declared facet fields.
|
||||
Keys must match the column.key of a facet field. Scalar → equality,
|
||||
@@ -1093,6 +1444,7 @@ class AsyncCrud(Generic[ModelType]):
|
||||
search,
|
||||
search_fields=search_fields,
|
||||
default_fields=cls.searchable_fields,
|
||||
search_column=search_column,
|
||||
)
|
||||
filters.extend(search_filters)
|
||||
search_joins.extend(new_search_joins)
|
||||
@@ -1106,6 +1458,10 @@ class AsyncCrud(Generic[ModelType]):
|
||||
# Apply search joins (always outer joins)
|
||||
q = _apply_search_joins(q, search_joins)
|
||||
|
||||
# Apply order joins (relation joins required for order_by field)
|
||||
if order_joins:
|
||||
q = _apply_search_joins(q, order_joins)
|
||||
|
||||
if filters:
|
||||
q = q.where(and_(*filters))
|
||||
if resolved := cls._resolve_load_options(load_options):
|
||||
@@ -1163,6 +1519,8 @@ class AsyncCrud(Generic[ModelType]):
|
||||
filter_attributes = await cls._build_filter_attributes(
|
||||
session, facet_fields, filters, search_joins
|
||||
)
|
||||
search_columns = cls._resolve_search_columns(search_fields)
|
||||
order_columns = cls._resolve_order_columns(order_fields)
|
||||
|
||||
return CursorPaginatedResponse(
|
||||
data=items,
|
||||
@@ -1173,6 +1531,8 @@ class AsyncCrud(Generic[ModelType]):
|
||||
has_more=has_more,
|
||||
),
|
||||
filter_attributes=filter_attributes,
|
||||
search_columns=search_columns,
|
||||
order_columns=order_columns,
|
||||
)
|
||||
|
||||
@overload
|
||||
@@ -1187,11 +1547,15 @@ class AsyncCrud(Generic[ModelType]):
|
||||
outer_join: bool = ...,
|
||||
load_options: Sequence[ExecutableOption] | None = ...,
|
||||
order_by: OrderByClause | None = ...,
|
||||
order_joins: list[Any] | None = ...,
|
||||
page: int = ...,
|
||||
cursor: str | None = ...,
|
||||
items_per_page: int = ...,
|
||||
include_total: bool = ...,
|
||||
search: str | SearchConfig | None = ...,
|
||||
search_fields: Sequence[SearchFieldType] | None = ...,
|
||||
search_column: str | None = ...,
|
||||
order_fields: Sequence[OrderFieldType] | None = ...,
|
||||
facet_fields: Sequence[FacetFieldType] | None = ...,
|
||||
filter_by: dict[str, Any] | BaseModel | None = ...,
|
||||
schema: type[BaseModel],
|
||||
@@ -1209,11 +1573,15 @@ class AsyncCrud(Generic[ModelType]):
|
||||
outer_join: bool = ...,
|
||||
load_options: Sequence[ExecutableOption] | None = ...,
|
||||
order_by: OrderByClause | None = ...,
|
||||
order_joins: list[Any] | None = ...,
|
||||
page: int = ...,
|
||||
cursor: str | None = ...,
|
||||
items_per_page: int = ...,
|
||||
include_total: bool = ...,
|
||||
search: str | SearchConfig | None = ...,
|
||||
search_fields: Sequence[SearchFieldType] | None = ...,
|
||||
search_column: str | None = ...,
|
||||
order_fields: Sequence[OrderFieldType] | None = ...,
|
||||
facet_fields: Sequence[FacetFieldType] | None = ...,
|
||||
filter_by: dict[str, Any] | BaseModel | None = ...,
|
||||
schema: type[BaseModel],
|
||||
@@ -1230,11 +1598,15 @@ class AsyncCrud(Generic[ModelType]):
|
||||
outer_join: bool = False,
|
||||
load_options: Sequence[ExecutableOption] | None = None,
|
||||
order_by: OrderByClause | None = None,
|
||||
order_joins: list[Any] | None = None,
|
||||
page: int = 1,
|
||||
cursor: str | None = None,
|
||||
items_per_page: int = 20,
|
||||
include_total: bool = True,
|
||||
search: str | SearchConfig | None = None,
|
||||
search_fields: Sequence[SearchFieldType] | None = None,
|
||||
search_column: str | None = None,
|
||||
order_fields: Sequence[OrderFieldType] | None = None,
|
||||
facet_fields: Sequence[FacetFieldType] | None = None,
|
||||
filter_by: dict[str, Any] | BaseModel | None = None,
|
||||
schema: type[BaseModel],
|
||||
@@ -1258,8 +1630,12 @@ class AsyncCrud(Generic[ModelType]):
|
||||
:class:`.CursorPaginatedResponse`. Only used when
|
||||
``pagination_type`` is ``CURSOR``.
|
||||
items_per_page: Number of items per page (default 20).
|
||||
include_total: When ``False``, skip the ``COUNT`` query;
|
||||
only applies when ``pagination_type`` is ``OFFSET``.
|
||||
search: Search query string or :class:`.SearchConfig` object.
|
||||
search_fields: Fields to search in (overrides class default).
|
||||
search_column: Restrict search to a single column key.
|
||||
order_fields: Fields allowed for sorting (overrides class default).
|
||||
facet_fields: Columns to compute distinct values for (overrides
|
||||
class default).
|
||||
filter_by: Dict of ``{column_key: value}`` to filter by declared
|
||||
@@ -1285,9 +1661,12 @@ class AsyncCrud(Generic[ModelType]):
|
||||
outer_join=outer_join,
|
||||
load_options=load_options,
|
||||
order_by=order_by,
|
||||
order_joins=order_joins,
|
||||
items_per_page=items_per_page,
|
||||
search=search,
|
||||
search_fields=search_fields,
|
||||
search_column=search_column,
|
||||
order_fields=order_fields,
|
||||
facet_fields=facet_fields,
|
||||
filter_by=filter_by,
|
||||
schema=schema,
|
||||
@@ -1302,10 +1681,14 @@ class AsyncCrud(Generic[ModelType]):
|
||||
outer_join=outer_join,
|
||||
load_options=load_options,
|
||||
order_by=order_by,
|
||||
order_joins=order_joins,
|
||||
page=page,
|
||||
items_per_page=items_per_page,
|
||||
include_total=include_total,
|
||||
search=search,
|
||||
search_fields=search_fields,
|
||||
search_column=search_column,
|
||||
order_fields=order_fields,
|
||||
facet_fields=facet_fields,
|
||||
filter_by=filter_by,
|
||||
schema=schema,
|
||||
@@ -1320,7 +1703,7 @@ def CrudFactory(
|
||||
base_class: type[AsyncCrud[Any]] = AsyncCrud,
|
||||
searchable_fields: Sequence[SearchFieldType] | None = None,
|
||||
facet_fields: Sequence[FacetFieldType] | None = None,
|
||||
order_fields: Sequence[QueryableAttribute[Any]] | None = None,
|
||||
order_fields: Sequence[OrderFieldType] | None = None,
|
||||
m2m_fields: M2MFieldType | None = None,
|
||||
default_load_options: Sequence[ExecutableOption] | None = None,
|
||||
cursor_column: Any | None = None,
|
||||
@@ -1337,7 +1720,7 @@ def CrudFactory(
|
||||
responses. Supports direct columns (``User.status``) and relationship tuples
|
||||
(``(User.role, Role.name)``). Can be overridden per call.
|
||||
order_fields: Optional list of model attributes that callers are allowed to order by
|
||||
via ``order_params()``. Can be overridden per call.
|
||||
via ``offset_paginate_params()``. Can be overridden per call.
|
||||
m2m_fields: Optional mapping for many-to-many relationships.
|
||||
Maps schema field names (containing lists of IDs) to
|
||||
SQLAlchemy relationship attributes.
|
||||
|
||||
@@ -2,17 +2,32 @@

import asyncio
import functools
from collections import Counter
from collections.abc import Sequence
from dataclasses import dataclass, replace
from typing import TYPE_CHECKING, Any, Literal

from sqlalchemy import String, and_, or_, select
from sqlalchemy import String, and_, func, or_, select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import DeclarativeBase
from sqlalchemy.orm.attributes import InstrumentedAttribute
from sqlalchemy.types import (
ARRAY,
Boolean,
Date,
DateTime,
Enum,
Integer,
Numeric,
Time,
Uuid,
)

from ..exceptions import InvalidFacetFilterError, NoSearchableFieldsError
from ..exceptions import (
InvalidFacetFilterError,
InvalidSearchColumnError,
NoSearchableFieldsError,
UnsupportedFacetTypeError,
)
from ..types import FacetFieldType, SearchFieldType

if TYPE_CHECKING:
@@ -82,6 +97,7 @@ def build_search_filters(
search: str | SearchConfig,
search_fields: Sequence[SearchFieldType] | None = None,
default_fields: Sequence[SearchFieldType] | None = None,
search_column: str | None = None,
) -> tuple[list["ColumnElement[bool]"], list[InstrumentedAttribute[Any]]]:
"""Build SQLAlchemy filter conditions for search.

@@ -90,6 +106,8 @@ def build_search_filters(
search: Search string or SearchConfig
search_fields: Fields specified per-call (takes priority)
default_fields: Default fields (from ClassVar)
search_column: Optional key to narrow search to a single field.
Must match one of the resolved search field keys.

Returns:
Tuple of (filter_conditions, joins_needed)
@@ -116,6 +134,14 @@ def build_search_filters(
if not fields:
raise NoSearchableFieldsError(model)

# Narrow to a single column when search_column is specified
if search_column is not None:
keys = search_field_keys(fields)
index = {k: f for k, f in zip(keys, fields)}
if search_column not in index:
raise InvalidSearchColumnError(search_column, sorted(index))
fields = [index[search_column]]

query = config.query.strip()
filters: list[ColumnElement[bool]] = []
joins: list[InstrumentedAttribute[Any]] = []
@@ -150,8 +176,13 @@ def build_search_filters(
return filters, joins


def search_field_keys(fields: Sequence[SearchFieldType]) -> list[str]:
"""Return a human-readable key for each search field."""
return facet_keys(fields)


def facet_keys(facet_fields: Sequence[FacetFieldType]) -> list[str]:
"""Return a key for each facet field, disambiguating duplicate column keys.
"""Return a key for each facet field.

Args:
facet_fields: Sequence of facet fields — either direct columns or
@@ -160,22 +191,12 @@ def facet_keys(facet_fields: Sequence[FacetFieldType]) -> list[str]:
Returns:
A list of string keys, one per facet field, in the same order.
"""
raw: list[tuple[str, str | None]] = []
keys: list[str] = []
for field in facet_fields:
if isinstance(field, tuple):
rel = field[-2]
column = field[-1]
raw.append((column.key, rel.key))
keys.append("__".join(el.key for el in field))
else:
raw.append((field.key, None))

counts = Counter(col_key for col_key, _ in raw)
keys: list[str] = []
for col_key, rel_key in raw:
if counts[col_key] > 1 and rel_key is not None:
keys.append(f"{rel_key}__{col_key}")
else:
keys.append(col_key)
keys.append(field.key)
return keys


@@ -212,23 +233,47 @@ async def build_facets(
rels = ()
column = field

q = select(column).select_from(model).distinct()
col_type = column.property.columns[0].type
is_array = isinstance(col_type, ARRAY)

# Apply base joins (already done on main query, but needed here independently)
if is_array:
unnested = func.unnest(column).label(column.key)
q = select(unnested).select_from(model).distinct()
else:
q = select(column).select_from(model).distinct()

# Apply base joins (deduplicated) — needed here independently
seen_joins: set[str] = set()
for rel in base_joins or []:
q = q.outerjoin(rel)
rel_key = str(rel)
if rel_key not in seen_joins:
seen_joins.add(rel_key)
q = q.outerjoin(rel)

# Add any extra joins required by this facet field that aren't already in base_joins
# Add any extra joins required by this facet field that aren't already applied
for rel in rels:
if str(rel) not in existing_join_keys:
rel_key = str(rel)
if rel_key not in existing_join_keys and rel_key not in seen_joins:
seen_joins.add(rel_key)
q = q.outerjoin(rel)

if base_filters:
q = q.where(and_(*base_filters))

q = q.order_by(column)
if is_array:
q = q.order_by(unnested)
else:
q = q.order_by(column)
result = await session.execute(q)
values = [row[0] for row in result.all() if row[0] is not None]
col_type = column.property.columns[0].type
enum_class = getattr(col_type, "enum_class", None)
values = [
row[0].name
if (enum_class is not None and isinstance(row[0], enum_class))
else row[0]
for row in result.all()
if row[0] is not None
]
return key, values

pairs = await asyncio.gather(
@@ -237,6 +282,22 @@ async def build_facets(
return dict(pairs)


_EQUALITY_TYPES = (String, Integer, Numeric, Date, DateTime, Time, Enum, Uuid)
"""Column types that support equality / IN filtering in build_filter_by."""


def _coerce_bool(value: Any) -> bool:
"""Coerce a string value to a Python bool for Boolean column filtering."""
if isinstance(value, bool):
return value
if isinstance(value, str):
if value.lower() == "true":
return True
if value.lower() == "false":
return False
raise ValueError(f"Cannot coerce {value!r} to bool")


def build_filter_by(
filter_by: dict[str, Any],
facet_fields: Sequence[FacetFieldType],
@@ -282,9 +343,42 @@ def build_filter_by(
joins.append(rel)
added_join_keys.add(rel_key)

if isinstance(value, list):
filters.append(column.in_(value))
col_type = column.property.columns[0].type
if isinstance(col_type, Boolean):
coerce = _coerce_bool
if isinstance(value, list):
filters.append(column.in_([coerce(v) for v in value]))
else:
filters.append(column == coerce(value))
elif isinstance(col_type, ARRAY):
if isinstance(value, list):
filters.append(column.overlap(value))
else:
filters.append(column.any(value))
elif isinstance(col_type, Enum):
enum_class = col_type.enum_class
if enum_class is not None:

def _coerce_enum(v: Any) -> Any:
if isinstance(v, enum_class):
return v
return enum_class[v] # lookup by name: "PENDING", "RED"

if isinstance(value, list):
filters.append(column.in_([_coerce_enum(v) for v in value]))
else:
filters.append(column == _coerce_enum(value))
else: # pragma: no cover
if isinstance(value, list):
filters.append(column.in_(value))
else:
filters.append(column == value)
elif isinstance(col_type, _EQUALITY_TYPES):
if isinstance(value, list):
filters.append(column.in_(value))
else:
filters.append(column == value)
else:
filters.append(column == value)
raise UnsupportedFacetTypeError(key, type(col_type).__name__)

return filters, joins
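The Boolean branch above delegates to `_coerce_bool`, and its behaviour follows directly from the definition shown; Enum values are looked up by member name via `enum_class[v]` in the same spirit. A quick sketch of what the coercion accepts and rejects:

```python
assert _coerce_bool(True) is True
assert _coerce_bool("TRUE") is True    # string coercion is case-insensitive
assert _coerce_bool("false") is False
# Anything else raises: _coerce_bool("yes") -> ValueError("Cannot coerce 'yes' to bool")
```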
@@ -24,9 +24,12 @@ __all__ = [
]


_SessionT = TypeVar("_SessionT", bound=AsyncSession)


def create_db_dependency(
session_maker: async_sessionmaker[AsyncSession],
) -> Callable[[], AsyncGenerator[AsyncSession, None]]:
session_maker: async_sessionmaker[_SessionT],
) -> Callable[[], AsyncGenerator[_SessionT, None]]:
"""Create a FastAPI dependency for database sessions.

Creates a dependency function that yields a session and auto-commits
@@ -54,8 +57,9 @@ def create_db_dependency(
```
"""

async def get_db() -> AsyncGenerator[AsyncSession, None]:
async def get_db() -> AsyncGenerator[_SessionT, None]:
async with session_maker() as session:
await session.connection()
yield session
if session.in_transaction():
await session.commit()
@@ -64,8 +68,8 @@ def create_db_dependency(


def create_db_context(
session_maker: async_sessionmaker[AsyncSession],
) -> Callable[[], AbstractAsyncContextManager[AsyncSession]]:
session_maker: async_sessionmaker[_SessionT],
) -> Callable[[], AbstractAsyncContextManager[_SessionT]]:
"""Create a context manager for database sessions.

Creates a context manager for use outside of FastAPI request handlers,
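Parametrising both factories over `_SessionT` lets the yielded session keep its concrete type for type checkers. A short sketch, assuming the `EventSession` subclass introduced later in this diff and a placeholder database URL:

```python
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

engine = create_async_engine("postgresql+asyncpg://localhost/app")  # placeholder URL
session_maker = async_sessionmaker(engine, class_=EventSession)

get_db = create_db_dependency(session_maker)     # yields EventSession, not plain AsyncSession
get_session = create_db_context(session_maker)   # same for the context-manager variant
```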
@@ -7,9 +7,11 @@ from .exceptions import (
ForbiddenError,
InvalidFacetFilterError,
InvalidOrderFieldError,
InvalidSearchColumnError,
NoSearchableFieldsError,
NotFoundError,
UnauthorizedError,
UnsupportedFacetTypeError,
generate_error_responses,
)
from .handler import init_exceptions_handlers
@@ -23,7 +25,9 @@ __all__ = [
"init_exceptions_handlers",
"InvalidFacetFilterError",
"InvalidOrderFieldError",
"InvalidSearchColumnError",
"NoSearchableFieldsError",
"NotFoundError",
"UnauthorizedError",
"UnsupportedFacetTypeError",
]
@@ -144,6 +144,61 @@ class InvalidFacetFilterError(ApiException):
)


class UnsupportedFacetTypeError(ApiException):
"""Raised when a facet field has a column type not supported by filter_by."""

api_error = ApiError(
code=400,
msg="Unsupported Facet Type",
desc="The column type is not supported for facet filtering.",
err_code="FACET-TYPE-400",
)

def __init__(self, key: str, col_type: str) -> None:
"""Initialize the exception.

Args:
key: The facet field key.
col_type: The unsupported column type name.
"""
self.key = key
self.col_type = col_type
super().__init__(
desc=(
f"Facet field '{key}' has unsupported column type '{col_type}'. "
f"Supported types: String, Integer, Numeric, Boolean, "
f"Date, DateTime, Time, Enum, Uuid, ARRAY."
)
)


class InvalidSearchColumnError(ApiException):
"""Raised when search_column is not one of the configured searchable fields."""

api_error = ApiError(
code=400,
msg="Invalid Search Column",
desc="The requested search column is not a configured searchable field.",
err_code="SEARCH-COL-400",
)

def __init__(self, column: str, valid_columns: list[str]) -> None:
"""Initialize the exception.

Args:
column: The unknown search column provided by the caller.
valid_columns: List of valid search column keys.
"""
self.column = column
self.valid_columns = valid_columns
super().__init__(
desc=(
f"'{column}' is not a searchable column. "
f"Valid columns: {valid_columns}."
)
)


class InvalidOrderFieldError(ApiException):
"""Raised when order_by contains a field not in the allowed order fields."""

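Both new exceptions carry enough context to be raised and asserted on directly; a small sketch with made-up values:

```python
try:
    raise UnsupportedFacetTypeError("settings", "JSONB")
except UnsupportedFacetTypeError as exc:
    assert exc.key == "settings" and exc.col_type == "JSONB"
    # exc.api_error: code=400, err_code="FACET-TYPE-400"

try:
    raise InvalidSearchColumnError("nickname", ["email", "username"])
except InvalidSearchColumnError as exc:
    assert exc.valid_columns == ["email", "username"]
```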
@@ -30,7 +30,7 @@ def init_exceptions_handlers(app: FastAPI) -> FastAPI:
"""
_register_exception_handlers(app)
_original_openapi = app.openapi
app.openapi = lambda: _patched_openapi(app, _original_openapi) # type: ignore[method-assign]
app.openapi = lambda: _patched_openapi(app, _original_openapi) # type: ignore[method-assign] # ty:ignore[invalid-assignment]
return app


@@ -122,7 +122,7 @@ def _format_validation_error(
)

return JSONResponse(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
content=error_response.model_dump(),
)
@@ -2,6 +2,7 @@
|
||||
|
||||
from collections.abc import Callable, Sequence
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
from typing import Any, cast
|
||||
|
||||
from sqlalchemy.orm import DeclarativeBase
|
||||
@@ -12,6 +13,13 @@ from .enum import Context
|
||||
logger = get_logger()
|
||||
|
||||
|
||||
def _normalize_contexts(
|
||||
contexts: list[str | Enum] | tuple[str | Enum, ...],
|
||||
) -> list[str]:
|
||||
"""Convert a sequence of any Enum subclass and/or plain strings to a list of strings."""
|
||||
return [c.value if isinstance(c, Enum) else c for c in contexts]
|
||||
|
||||
|
||||
@dataclass
|
||||
class Fixture:
|
||||
"""A fixture definition with metadata."""
|
||||
@@ -50,26 +58,51 @@ class FixtureRegistry:
|
||||
Post(id=1, title="Test", user_id=1),
|
||||
]
|
||||
```
|
||||
|
||||
Fixtures with the same name may be registered for **different** contexts.
|
||||
When multiple contexts are loaded together, their instances are merged:
|
||||
|
||||
```python
|
||||
@fixtures.register(contexts=[Context.BASE])
|
||||
def users():
|
||||
return [User(id=1, username="admin")]
|
||||
|
||||
@fixtures.register(contexts=[Context.TESTING])
|
||||
def users():
|
||||
return [User(id=2, username="tester")]
|
||||
# load_fixtures_by_context(..., Context.BASE, Context.TESTING)
|
||||
# → loads both User(admin) and User(tester) under the "users" name
|
||||
```
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
contexts: list[str | Context] | None = None,
|
||||
contexts: list[str | Enum] | None = None,
|
||||
) -> None:
|
||||
self._fixtures: dict[str, Fixture] = {}
|
||||
self._fixtures: dict[str, list[Fixture]] = {}
|
||||
self._default_contexts: list[str] | None = (
|
||||
[c.value if isinstance(c, Context) else c for c in contexts]
|
||||
if contexts
|
||||
else None
|
||||
_normalize_contexts(contexts) if contexts else None
|
||||
)
|
||||
|
||||
def _validate_no_context_overlap(self, name: str, new_contexts: list[str]) -> None:
|
||||
"""Raise ``ValueError`` if any existing variant for *name* overlaps."""
|
||||
existing_variants = self._fixtures.get(name, [])
|
||||
new_set = set(new_contexts)
|
||||
for variant in existing_variants:
|
||||
if set(variant.contexts) & new_set:
|
||||
raise ValueError(
|
||||
f"Fixture '{name}' already exists in the current registry "
|
||||
f"with overlapping contexts. Use distinct context sets for "
|
||||
f"each variant of the same fixture name."
|
||||
)
|
||||
|
||||
def register(
|
||||
self,
|
||||
func: Callable[[], Sequence[DeclarativeBase]] | None = None,
|
||||
*,
|
||||
name: str | None = None,
|
||||
depends_on: list[str] | None = None,
|
||||
contexts: list[str | Context] | None = None,
|
||||
contexts: list[str | Enum] | None = None,
|
||||
) -> Callable[..., Any]:
|
||||
"""Register a fixture function.
|
||||
|
||||
@@ -79,7 +112,8 @@ class FixtureRegistry:
|
||||
func: Fixture function returning list of model instances
|
||||
name: Fixture name (defaults to function name)
|
||||
depends_on: List of fixture names this depends on
|
||||
contexts: List of contexts this fixture belongs to
|
||||
contexts: List of contexts this fixture belongs to. Both
|
||||
:class:`Context` enum values and plain strings are accepted.
|
||||
|
||||
Example:
|
||||
```python
|
||||
@@ -90,7 +124,6 @@ class FixtureRegistry:
|
||||
@fixtures.register(depends_on=["roles"], contexts=[Context.TESTING])
|
||||
def test_users():
|
||||
return [User(id=1, username="test", role_id=1)]
|
||||
```
|
||||
"""
|
||||
|
||||
def decorator(
|
||||
@@ -98,19 +131,20 @@ class FixtureRegistry:
|
||||
) -> Callable[[], Sequence[DeclarativeBase]]:
|
||||
fixture_name = name or cast(Any, fn).__name__
|
||||
if contexts is not None:
|
||||
fixture_contexts = [
|
||||
c.value if isinstance(c, Context) else c for c in contexts
|
||||
]
|
||||
fixture_contexts = _normalize_contexts(contexts)
|
||||
elif self._default_contexts is not None:
|
||||
fixture_contexts = self._default_contexts
|
||||
else:
|
||||
fixture_contexts = [Context.BASE.value]
|
||||
|
||||
self._fixtures[fixture_name] = Fixture(
|
||||
name=fixture_name,
|
||||
func=fn,
|
||||
depends_on=depends_on or [],
|
||||
contexts=fixture_contexts,
|
||||
self._validate_no_context_overlap(fixture_name, fixture_contexts)
|
||||
self._fixtures.setdefault(fixture_name, []).append(
|
||||
Fixture(
|
||||
name=fixture_name,
|
||||
func=fn,
|
||||
depends_on=depends_on or [],
|
||||
contexts=fixture_contexts,
|
||||
)
|
||||
)
|
||||
return fn
|
||||
|
||||
@@ -121,11 +155,14 @@ class FixtureRegistry:
|
||||
def include_registry(self, registry: "FixtureRegistry") -> None:
|
||||
"""Include another `FixtureRegistry` in the same current `FixtureRegistry`.
|
||||
|
||||
Fixtures with the same name are allowed as long as their context sets
|
||||
do not overlap. Conflicting contexts raise :class:`ValueError`.
|
||||
|
||||
Args:
|
||||
registry: The `FixtureRegistry` to include
|
||||
|
||||
Raises:
|
||||
ValueError: If a fixture name already exists in the current registry
|
||||
ValueError: If a fixture name already exists with overlapping contexts
|
||||
|
||||
Example:
|
||||
```python
|
||||
@@ -139,31 +176,73 @@ class FixtureRegistry:
|
||||
registry.include_registry(registry=dev_registry)
|
||||
```
|
||||
"""
|
||||
for name, fixture in registry._fixtures.items():
|
||||
if name in self._fixtures:
|
||||
raise ValueError(
|
||||
f"Fixture '{name}' already exists in the current registry"
|
||||
)
|
||||
self._fixtures[name] = fixture
|
||||
for name, variants in registry._fixtures.items():
|
||||
for fixture in variants:
|
||||
self._validate_no_context_overlap(name, fixture.contexts)
|
||||
self._fixtures.setdefault(name, []).append(fixture)
|
||||
|
||||
def get(self, name: str) -> Fixture:
|
||||
"""Get a fixture by name."""
|
||||
"""Get a fixture by name.
|
||||
|
||||
Raises:
|
||||
KeyError: If no fixture with *name* is registered.
|
||||
ValueError: If the fixture has multiple context variants — use
|
||||
:meth:`get_variants` in that case.
|
||||
"""
|
||||
if name not in self._fixtures:
|
||||
raise KeyError(f"Fixture '{name}' not found")
|
||||
return self._fixtures[name]
|
||||
variants = self._fixtures[name]
|
||||
if len(variants) > 1:
|
||||
raise ValueError(
|
||||
f"Fixture '{name}' has {len(variants)} context variants. "
|
||||
f"Use get_variants('{name}') to retrieve them."
|
||||
)
|
||||
return variants[0]
|
||||
|
||||
def get_variants(self, name: str, *contexts: str | Enum) -> list[Fixture]:
|
||||
"""Return all registered variants for *name*, optionally filtered by context.
|
||||
|
||||
Args:
|
||||
name: Fixture name.
|
||||
*contexts: If given, only return variants whose context set
|
||||
intersects with these values. Both :class:`Context` enum
|
||||
values and plain strings are accepted.
|
||||
|
||||
Returns:
|
||||
List of matching :class:`Fixture` objects (may be empty when a
|
||||
context filter is applied and nothing matches).
|
||||
|
||||
Raises:
|
||||
KeyError: If no fixture with *name* is registered.
|
||||
"""
|
||||
if name not in self._fixtures:
|
||||
raise KeyError(f"Fixture '{name}' not found")
|
||||
variants = self._fixtures[name]
|
||||
if not contexts:
|
||||
return list(variants)
|
||||
context_values = set(_normalize_contexts(contexts))
|
||||
return [v for v in variants if set(v.contexts) & context_values]
|
||||
|
||||
def get_all(self) -> list[Fixture]:
|
||||
"""Get all registered fixtures."""
|
||||
return list(self._fixtures.values())
|
||||
"""Get all registered fixtures (all variants of all names)."""
|
||||
return [f for variants in self._fixtures.values() for f in variants]
|
||||
|
||||
def get_by_context(self, *contexts: str | Context) -> list[Fixture]:
|
||||
def get_by_context(self, *contexts: str | Enum) -> list[Fixture]:
|
||||
"""Get fixtures for specific contexts."""
|
||||
context_values = {c.value if isinstance(c, Context) else c for c in contexts}
|
||||
return [f for f in self._fixtures.values() if set(f.contexts) & context_values]
|
||||
context_values = set(_normalize_contexts(contexts))
|
||||
return [
|
||||
f
|
||||
for variants in self._fixtures.values()
|
||||
for f in variants
|
||||
if set(f.contexts) & context_values
|
||||
]
|
||||
|
||||
def resolve_dependencies(self, *names: str) -> list[str]:
|
||||
"""Resolve fixture dependencies in topological order.
|
||||
|
||||
When a fixture name has multiple context variants, the union of all
|
||||
variants' ``depends_on`` lists is used.
|
||||
|
||||
Args:
|
||||
*names: Fixture names to resolve
|
||||
|
||||
@@ -185,9 +264,20 @@ class FixtureRegistry:
|
||||
raise ValueError(f"Circular dependency detected: {name}")
|
||||
|
||||
visiting.add(name)
|
||||
fixture = self.get(name)
|
||||
variants = self._fixtures.get(name)
|
||||
if variants is None:
|
||||
raise KeyError(f"Fixture '{name}' not found")
|
||||
|
||||
for dep in fixture.depends_on:
|
||||
# Union of depends_on across all variants, preserving first-seen order.
|
||||
seen_deps: set[str] = set()
|
||||
all_deps: list[str] = []
|
||||
for variant in variants:
|
||||
for dep in variant.depends_on:
|
||||
if dep not in seen_deps:
|
||||
all_deps.append(dep)
|
||||
seen_deps.add(dep)
|
||||
|
||||
for dep in all_deps:
|
||||
visit(dep)
|
||||
|
||||
visiting.remove(name)
|
||||
@@ -199,7 +289,7 @@ class FixtureRegistry:
|
||||
|
||||
return resolved
|
||||
|
||||
def resolve_context_dependencies(self, *contexts: str | Context) -> list[str]:
|
||||
def resolve_context_dependencies(self, *contexts: str | Enum) -> list[str]:
|
||||
"""Resolve all fixtures for contexts with dependencies.
|
||||
|
||||
Args:
|
||||
@@ -209,7 +299,9 @@ class FixtureRegistry:
|
||||
List of fixture names in load order
|
||||
"""
|
||||
context_fixtures = self.get_by_context(*contexts)
|
||||
names = [f.name for f in context_fixtures]
|
||||
# Deduplicate names while preserving first-seen order (a name can
|
||||
# appear multiple times if it has variants in different contexts).
|
||||
names = list(dict.fromkeys(f.name for f in context_fixtures))
|
||||
|
||||
all_deps: set[str] = set()
|
||||
for name in names:
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
"""Fixture loading utilities for database seeding."""
|
||||
|
||||
from collections.abc import Callable, Sequence
|
||||
from enum import Enum
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import inspect as sa_inspect
|
||||
from sqlalchemy.dialects.postgresql import insert as pg_insert
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import DeclarativeBase
|
||||
|
||||
@@ -10,23 +13,177 @@ from ..db import get_transaction
|
||||
from ..logger import get_logger
|
||||
from ..types import ModelType
|
||||
from .enum import LoadStrategy
|
||||
from .registry import Context, FixtureRegistry
|
||||
from .registry import FixtureRegistry, _normalize_contexts
|
||||
|
||||
logger = get_logger()
|
||||
|
||||
|
||||
def _instance_to_dict(instance: DeclarativeBase) -> dict[str, Any]:
|
||||
"""Extract column values from a model instance, skipping unset server-default columns."""
|
||||
state = sa_inspect(instance)
|
||||
state_dict = state.dict
|
||||
result: dict[str, Any] = {}
|
||||
for prop in state.mapper.column_attrs:
|
||||
if prop.key not in state_dict:
|
||||
continue
|
||||
val = state_dict[prop.key]
|
||||
if val is None:
|
||||
col = prop.columns[0]
|
||||
|
||||
if (
|
||||
col.server_default is not None
|
||||
or (col.default is not None and col.default.is_callable)
|
||||
or col.autoincrement is True
|
||||
):
|
||||
continue
|
||||
result[prop.key] = val
|
||||
return result
|
||||
|
||||
|
||||
def _group_by_type(
|
||||
instances: list[DeclarativeBase],
|
||||
) -> list[tuple[type[DeclarativeBase], list[DeclarativeBase]]]:
|
||||
"""Group instances by their concrete model class, preserving insertion order."""
|
||||
groups: dict[type[DeclarativeBase], list[DeclarativeBase]] = {}
|
||||
for instance in instances:
|
||||
groups.setdefault(type(instance), []).append(instance)
|
||||
return list(groups.items())
|
||||
|
||||
|
||||
def _group_by_column_set(
|
||||
dicts: list[dict[str, Any]],
|
||||
instances: list[DeclarativeBase],
|
||||
) -> list[tuple[list[dict[str, Any]], list[DeclarativeBase]]]:
|
||||
"""Group (dict, instance) pairs by their dict key sets."""
|
||||
groups: dict[
|
||||
frozenset[str], tuple[list[dict[str, Any]], list[DeclarativeBase]]
|
||||
] = {}
|
||||
for d, inst in zip(dicts, instances):
|
||||
key = frozenset(d)
|
||||
if key not in groups:
|
||||
groups[key] = ([], [])
|
||||
groups[key][0].append(d)
|
||||
groups[key][1].append(inst)
|
||||
return list(groups.values())
|
||||
|
||||
|
||||
async def _batch_insert(
|
||||
session: AsyncSession,
|
||||
model_cls: type[DeclarativeBase],
|
||||
instances: list[DeclarativeBase],
|
||||
) -> None:
|
||||
"""INSERT all instances — raises on conflict (no duplicate handling)."""
|
||||
dicts = [_instance_to_dict(i) for i in instances]
|
||||
for group_dicts, _ in _group_by_column_set(dicts, instances):
|
||||
await session.execute(pg_insert(model_cls).values(group_dicts))
|
||||
|
||||
|
||||
async def _batch_merge(
|
||||
session: AsyncSession,
|
||||
model_cls: type[DeclarativeBase],
|
||||
instances: list[DeclarativeBase],
|
||||
) -> None:
|
||||
"""UPSERT: insert new rows, update existing ones with the provided values."""
|
||||
mapper = model_cls.__mapper__
|
||||
pk_names = [col.name for col in mapper.primary_key]
|
||||
pk_names_set = set(pk_names)
|
||||
non_pk_cols = [
|
||||
prop.key
|
||||
for prop in mapper.column_attrs
|
||||
if not any(col.name in pk_names_set for col in prop.columns)
|
||||
]
|
||||
|
||||
dicts = [_instance_to_dict(i) for i in instances]
|
||||
for group_dicts, _ in _group_by_column_set(dicts, instances):
|
||||
stmt = pg_insert(model_cls).values(group_dicts)
|
||||
|
||||
inserted_keys = set(group_dicts[0])
|
||||
update_cols = [col for col in non_pk_cols if col in inserted_keys]
|
||||
|
||||
if update_cols:
|
||||
stmt = stmt.on_conflict_do_update(
|
||||
index_elements=pk_names,
|
||||
set_={col: stmt.excluded[col] for col in update_cols},
|
||||
)
|
||||
else:
|
||||
stmt = stmt.on_conflict_do_nothing(index_elements=pk_names)
|
||||
|
||||
await session.execute(stmt)
|
||||
|
||||
|
||||
async def _batch_skip_existing(
|
||||
session: AsyncSession,
|
||||
model_cls: type[DeclarativeBase],
|
||||
instances: list[DeclarativeBase],
|
||||
) -> list[DeclarativeBase]:
|
||||
"""INSERT only rows that do not already exist; return the inserted ones."""
|
||||
mapper = model_cls.__mapper__
|
||||
pk_names = [col.name for col in mapper.primary_key]
|
||||
|
||||
no_pk: list[DeclarativeBase] = []
|
||||
with_pk_pairs: list[tuple[DeclarativeBase, Any]] = []
|
||||
for inst in instances:
|
||||
pk = _get_primary_key(inst)
|
||||
if pk is None:
|
||||
no_pk.append(inst)
|
||||
else:
|
||||
with_pk_pairs.append((inst, pk))
|
||||
|
||||
loaded: list[DeclarativeBase] = list(no_pk)
|
||||
if no_pk:
|
||||
no_pk_dicts = [_instance_to_dict(i) for i in no_pk]
|
||||
for group_dicts, _ in _group_by_column_set(no_pk_dicts, no_pk):
|
||||
await session.execute(pg_insert(model_cls).values(group_dicts))
|
||||
|
||||
if with_pk_pairs:
|
||||
with_pk = [i for i, _ in with_pk_pairs]
|
||||
with_pk_dicts = [_instance_to_dict(i) for i in with_pk]
|
||||
for group_dicts, group_insts in _group_by_column_set(with_pk_dicts, with_pk):
|
||||
stmt = (
|
||||
pg_insert(model_cls)
|
||||
.values(group_dicts)
|
||||
.on_conflict_do_nothing(index_elements=pk_names)
|
||||
)
|
||||
result = await session.execute(stmt.returning(*mapper.primary_key))
|
||||
inserted_pks = {
|
||||
row[0] if len(pk_names) == 1 else tuple(row) for row in result
|
||||
}
|
||||
loaded.extend(
|
||||
inst
|
||||
for inst, pk in zip(
|
||||
group_insts, [_get_primary_key(i) for i in group_insts]
|
||||
)
|
||||
if pk in inserted_pks
|
||||
)
|
||||
|
||||
return loaded
|
||||
|
||||
|
||||
async def _load_ordered(
|
||||
session: AsyncSession,
|
||||
registry: FixtureRegistry,
|
||||
ordered_names: list[str],
|
||||
strategy: LoadStrategy,
|
||||
contexts: tuple[str, ...] | None = None,
|
||||
) -> dict[str, list[DeclarativeBase]]:
|
||||
"""Load fixtures in order."""
|
||||
"""Load fixtures in order using batch Core INSERT statements."""
|
||||
results: dict[str, list[DeclarativeBase]] = {}
|
||||
|
||||
for name in ordered_names:
|
||||
fixture = registry.get(name)
|
||||
instances = list(fixture.func())
|
||||
variants = (
|
||||
registry.get_variants(name, *contexts)
|
||||
if contexts is not None
|
||||
else registry.get_variants(name)
|
||||
)
|
||||
|
||||
if contexts is not None and not variants:
|
||||
variants = registry.get_variants(name)
|
||||
|
||||
if not variants:
|
||||
results[name] = []
|
||||
continue
|
||||
|
||||
instances = [inst for v in variants for inst in v.func()]
|
||||
|
||||
if not instances:
|
||||
results[name] = []
|
||||
@@ -36,25 +193,17 @@ async def _load_ordered(
|
||||
loaded: list[DeclarativeBase] = []
|
||||
|
||||
async with get_transaction(session):
|
||||
for instance in instances:
|
||||
if strategy == LoadStrategy.INSERT:
|
||||
session.add(instance)
|
||||
loaded.append(instance)
|
||||
|
||||
elif strategy == LoadStrategy.MERGE:
|
||||
merged = await session.merge(instance)
|
||||
loaded.append(merged)
|
||||
|
||||
else: # LoadStrategy.SKIP_EXISTING
|
||||
pk = _get_primary_key(instance)
|
||||
if pk is not None:
|
||||
existing = await session.get(type(instance), pk)
|
||||
if existing is None:
|
||||
session.add(instance)
|
||||
loaded.append(instance)
|
||||
else:
|
||||
session.add(instance)
|
||||
loaded.append(instance)
|
||||
for model_cls, group in _group_by_type(instances):
|
||||
match strategy:
|
||||
case LoadStrategy.INSERT:
|
||||
await _batch_insert(session, model_cls, group)
|
||||
loaded.extend(group)
|
||||
case LoadStrategy.MERGE:
|
||||
await _batch_merge(session, model_cls, group)
|
||||
loaded.extend(group)
|
||||
case LoadStrategy.SKIP_EXISTING:
|
||||
inserted = await _batch_skip_existing(session, model_cls, group)
|
||||
loaded.extend(inserted)
|
||||
|
||||
results[name] = loaded
|
||||
logger.info(f"Loaded fixture '{name}': {len(loaded)} {model_name}(s)")
|
||||
@@ -109,6 +258,8 @@ async def load_fixtures(
|
||||
) -> dict[str, list[DeclarativeBase]]:
|
||||
"""Load specific fixtures by name with dependencies.
|
||||
|
||||
All context variants of each requested fixture are loaded and merged.
|
||||
|
||||
Args:
|
||||
session: Database session
|
||||
registry: Fixture registry
|
||||
@@ -125,7 +276,7 @@ async def load_fixtures(
|
||||
async def load_fixtures_by_context(
|
||||
session: AsyncSession,
|
||||
registry: FixtureRegistry,
|
||||
*contexts: str | Context,
|
||||
*contexts: str | Enum,
|
||||
strategy: LoadStrategy = LoadStrategy.MERGE,
|
||||
) -> dict[str, list[DeclarativeBase]]:
|
||||
"""Load all fixtures for specific contexts.
|
||||
@@ -133,11 +284,15 @@ async def load_fixtures_by_context(
|
||||
Args:
|
||||
session: Database session
|
||||
registry: Fixture registry
|
||||
*contexts: Contexts to load (e.g., Context.BASE, Context.TESTING)
|
||||
*contexts: Contexts to load (e.g., ``Context.BASE``, ``Context.TESTING``,
|
||||
or plain strings for custom contexts)
|
||||
strategy: How to handle existing records
|
||||
|
||||
Returns:
|
||||
Dict mapping fixture names to loaded instances
|
||||
"""
|
||||
context_strings = tuple(_normalize_contexts(contexts))
|
||||
ordered = registry.resolve_context_dependencies(*contexts)
|
||||
return await _load_ordered(session, registry, ordered, strategy)
|
||||
return await _load_ordered(
|
||||
session, registry, ordered, strategy, contexts=context_strings
|
||||
)
|
||||
|
||||
@@ -66,7 +66,7 @@ def configure_logging(
_SENTINEL = object()


def get_logger(name: str | None = _SENTINEL) -> logging.Logger: # type: ignore[assignment]
def get_logger(name: str | None = _SENTINEL) -> logging.Logger: # type: ignore[assignment] # ty:ignore[invalid-parameter-default]
"""Return a logger with the given *name*.

A thin convenience wrapper around :func:`logging.getLogger` that keeps
@@ -1,6 +1,6 @@
"""Prometheus metrics endpoint for FastAPI applications."""

import asyncio
import inspect
import os

from fastapi import FastAPI
@@ -55,10 +55,10 @@ def init_metrics(

# Partition collectors and cache env check at startup — both are stable for the app lifetime.
async_collectors = [
c for c in registry.get_collectors() if asyncio.iscoroutinefunction(c.func)
c for c in registry.get_collectors() if inspect.iscoroutinefunction(c.func)
]
sync_collectors = [
c for c in registry.get_collectors() if not asyncio.iscoroutinefunction(c.func)
c for c in registry.get_collectors() if not inspect.iscoroutinefunction(c.func)
]
multiprocess_mode = _is_multiprocess()
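The swap to `inspect.iscoroutinefunction` keeps the same partitioning while moving off `asyncio.iscoroutinefunction`, which is deprecated as of Python 3.14. A standalone sketch of the partitioning logic with made-up collectors (the real code works on registry collector objects via `c.func`):

```python
import inspect

async def queue_depth() -> float:   # async collector
    return 3.0

def worker_count() -> float:        # sync collector
    return 4.0

collectors = [queue_depth, worker_count]
async_ones = [c for c in collectors if inspect.iscoroutinefunction(c)]
sync_ones = [c for c in collectors if not inspect.iscoroutinefunction(c)]
assert async_ones == [queue_depth] and sync_ones == [worker_count]
```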
@@ -7,15 +7,15 @@ from .columns import (
UUIDv7Mixin,
UpdatedAtMixin,
)
from .watched import ModelEvent, WatchedFieldsMixin, watch
from .watched import EventSession, ModelEvent, listens_for

__all__ = [
"EventSession",
"ModelEvent",
"UUIDMixin",
"UUIDv7Mixin",
"CreatedAtMixin",
"UpdatedAtMixin",
"TimestampMixin",
"WatchedFieldsMixin",
"watch",
"listens_for",
]
@@ -6,14 +6,6 @@ from datetime import datetime
from sqlalchemy import DateTime, Uuid, text
from sqlalchemy.orm import Mapped, mapped_column

__all__ = [
"UUIDMixin",
"UUIDv7Mixin",
"CreatedAtMixin",
"UpdatedAtMixin",
"TimestampMixin",
]


class UUIDMixin:
"""Mixin that adds a UUID primary key auto-generated by the database."""

@@ -1,58 +1,127 @@
|
||||
"""Field-change monitoring via SQLAlchemy session events."""
|
||||
|
||||
import asyncio
|
||||
import weakref
|
||||
from collections.abc import Awaitable
|
||||
import inspect
|
||||
from collections.abc import Callable
|
||||
from enum import Enum
|
||||
from typing import Any, TypeVar
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import event
|
||||
from sqlalchemy import inspect as sa_inspect
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm.attributes import set_committed_value as _sa_set_committed_value
|
||||
|
||||
from ..logger import get_logger
|
||||
|
||||
__all__ = ["ModelEvent", "WatchedFieldsMixin", "watch"]
|
||||
|
||||
_logger = get_logger()
|
||||
_T = TypeVar("_T")
|
||||
_CALLBACK_ERROR_MSG = "WatchedFieldsMixin callback raised an unhandled exception"
|
||||
_WATCHED_FIELDS: weakref.WeakKeyDictionary[type, list[str]] = (
|
||||
weakref.WeakKeyDictionary()
|
||||
)
|
||||
_SESSION_PENDING_NEW = "_ft_pending_new"
|
||||
_SESSION_CREATES = "_ft_creates"
|
||||
_SESSION_DELETES = "_ft_deletes"
|
||||
_SESSION_UPDATES = "_ft_updates"
|
||||
|
||||
|
||||
class ModelEvent(str, Enum):
|
||||
"""Event types emitted by :class:`WatchedFieldsMixin`."""
|
||||
"""Event types dispatched by :class:`EventSession`."""
|
||||
|
||||
CREATE = "create"
|
||||
DELETE = "delete"
|
||||
UPDATE = "update"
|
||||
|
||||
|
||||
def watch(*fields: str) -> Any:
|
||||
"""Class decorator to filter which fields trigger ``on_update``.
|
||||
_CALLBACK_ERROR_MSG = "Event callback raised an unhandled exception"
|
||||
_SESSION_CREATES = "_ft_creates"
|
||||
_SESSION_DELETES = "_ft_deletes"
|
||||
_SESSION_UPDATES = "_ft_updates"
|
||||
_DEFERRED_STRATEGY_KEY = (("deferred", True), ("instrument", True))
|
||||
_EVENT_HANDLERS: dict[tuple[type, ModelEvent], list[Callable[..., Any]]] = {}
|
||||
_WATCHED_MODELS: set[type] = set()
|
||||
_WATCHED_CACHE: dict[type, bool] = {}
|
||||
_HANDLER_CACHE: dict[tuple[type, ModelEvent], list[Callable[..., Any]]] = {}
|
||||
|
||||
|
||||
def _invalidate_caches() -> None:
|
||||
"""Clear lookup caches after handler registration."""
|
||||
_WATCHED_CACHE.clear()
|
||||
_HANDLER_CACHE.clear()
|
||||
|
||||
|
||||
def listens_for(
|
||||
model_class: type,
|
||||
event_types: list[ModelEvent] | None = None,
|
||||
) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
|
||||
"""Register a callback for one or more model lifecycle events.
|
||||
|
||||
Args:
|
||||
*fields: One or more field names to watch. At least one name is required.
|
||||
|
||||
Raises:
|
||||
ValueError: If called with no field names.
|
||||
model_class: The SQLAlchemy model class to listen on.
|
||||
event_types: List of :class:`ModelEvent` values to listen for.
|
||||
Defaults to all event types.
|
||||
"""
|
||||
if not fields:
|
||||
raise ValueError("@watch requires at least one field name.")
|
||||
evs = event_types if event_types is not None else list(ModelEvent)
|
||||
|
||||
def decorator(cls: type[_T]) -> type[_T]:
|
||||
_WATCHED_FIELDS[cls] = list(fields)
|
||||
return cls
|
||||
def decorator(fn: Callable[..., Any]) -> Callable[..., Any]:
|
||||
for ev in evs:
|
||||
_EVENT_HANDLERS.setdefault((model_class, ev), []).append(fn)
|
||||
_WATCHED_MODELS.add(model_class)
|
||||
_invalidate_caches()
|
||||
return fn
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def _is_watched(obj: Any) -> bool:
|
||||
"""Return True if *obj*'s type (or any ancestor) has registered handlers."""
|
||||
cls = type(obj)
|
||||
try:
|
||||
return _WATCHED_CACHE[cls]
|
||||
except KeyError:
|
||||
result = any(klass in _WATCHED_MODELS for klass in cls.__mro__)
|
||||
_WATCHED_CACHE[cls] = result
|
||||
return result
|
||||
|
||||
|
||||
def _get_handlers(cls: type, ev: ModelEvent) -> list[Callable[..., Any]]:
|
||||
"""Return registered handlers for *cls* and *ev*, walking the MRO."""
|
||||
key = (cls, ev)
|
||||
try:
|
||||
return _HANDLER_CACHE[key]
|
||||
except KeyError:
|
||||
handlers: list[Callable[..., Any]] = []
|
||||
for klass in cls.__mro__:
|
||||
handlers.extend(_EVENT_HANDLERS.get((klass, ev), []))
|
||||
_HANDLER_CACHE[key] = handlers
|
||||
return handlers
|
||||
|
||||
|
||||
def _snapshot_column_attrs(obj: Any) -> dict[str, Any]:
|
||||
"""Read currently-loaded column values into a plain dict."""
|
||||
state = sa_inspect(obj) # InstanceState
|
||||
state_dict = state.dict
|
||||
snapshot: dict[str, Any] = {}
|
||||
for prop in state.mapper.column_attrs:
|
||||
if prop.key in state_dict:
|
||||
snapshot[prop.key] = state_dict[prop.key]
|
||||
elif ( # pragma: no cover
|
||||
not state.expired
|
||||
and prop.strategy_key != _DEFERRED_STRATEGY_KEY
|
||||
and all(
|
||||
col.nullable
|
||||
and col.server_default is None
|
||||
and col.server_onupdate is None
|
||||
for col in prop.columns
|
||||
)
|
||||
):
|
||||
snapshot[prop.key] = None
|
||||
return snapshot
|
||||
|
||||
|
||||
def _get_watched_fields(cls: type) -> tuple[str, ...] | None:
|
||||
"""Return the watched fields for *cls*."""
|
||||
fields = getattr(cls, "__watched_fields__", None)
|
||||
if fields is not None and (
|
||||
not isinstance(fields, tuple) or not all(isinstance(f, str) for f in fields)
|
||||
):
|
||||
raise TypeError(
|
||||
f"{cls.__name__}.__watched_fields__ must be a tuple[str, ...], "
|
||||
f"got {type(fields).__name__}"
|
||||
)
|
||||
return fields
|
||||
|
||||
|
||||
def _upsert_changes(
|
||||
pending: dict[int, tuple[Any, dict[str, dict[str, Any]]]],
|
||||
obj: Any,
|
||||
@@ -73,32 +142,30 @@ def _upsert_changes(
|
||||
|
||||
@event.listens_for(AsyncSession.sync_session_class, "after_flush")
|
||||
def _after_flush(session: Any, flush_context: Any) -> None:
|
||||
# New objects: capture references while session.new is still populated.
|
||||
# Values are read in _after_flush_postexec once RETURNING has been processed.
|
||||
# New objects: capture reference. Attributes will be refreshed after commit.
|
||||
for obj in session.new:
|
||||
if isinstance(obj, WatchedFieldsMixin):
|
||||
session.info.setdefault(_SESSION_PENDING_NEW, []).append(obj)
|
||||
if _is_watched(obj):
|
||||
session.info.setdefault(_SESSION_CREATES, []).append(obj)
|
||||
|
||||
# Deleted objects: capture before they leave the identity map.
|
||||
# Deleted objects: snapshot now while attributes are still loaded.
|
||||
for obj in session.deleted:
|
||||
if isinstance(obj, WatchedFieldsMixin):
|
||||
session.info.setdefault(_SESSION_DELETES, []).append(obj)
|
||||
if _is_watched(obj):
|
||||
snapshot = _snapshot_column_attrs(obj)
|
||||
session.info.setdefault(_SESSION_DELETES, []).append((obj, snapshot))
|
||||
|
||||
# Dirty objects: read old/new from SQLAlchemy attribute history.
|
||||
for obj in session.dirty:
|
||||
if not isinstance(obj, WatchedFieldsMixin):
|
||||
if not _is_watched(obj):
|
||||
continue
|
||||
|
||||
# None = not in dict = watch all fields; list = specific fields only
|
||||
watched = _WATCHED_FIELDS.get(type(obj))
|
||||
watched = _get_watched_fields(type(obj))
|
||||
changes: dict[str, dict[str, Any]] = {}
|
||||
|
||||
inst_attrs = sa_inspect(obj).attrs
|
||||
attrs = (
|
||||
# Specific fields
|
||||
((field, sa_inspect(obj).attrs[field]) for field in watched)
|
||||
((field, inst_attrs[field]) for field in watched)
|
||||
if watched is not None
|
||||
# All mapped fields
|
||||
else ((s.key, s) for s in sa_inspect(obj).attrs)
|
||||
else ((s.key, s) for s in inst_attrs)
|
||||
)
|
||||
for field, attr_state in attrs:
|
||||
history = attr_state.history
|
||||
@@ -116,89 +183,108 @@ def _after_flush(session: Any, flush_context: Any) -> None:
|
||||
)
|
||||
|
||||
|
||||
@event.listens_for(AsyncSession.sync_session_class, "after_flush_postexec")
|
||||
def _after_flush_postexec(session: Any, flush_context: Any) -> None:
|
||||
# New objects are now persistent and RETURNING values have been applied,
|
||||
# so server defaults (id, created_at, …) are available via getattr.
|
||||
pending_new: list[Any] = session.info.pop(_SESSION_PENDING_NEW, [])
|
||||
if not pending_new:
|
||||
return
|
||||
session.info.setdefault(_SESSION_CREATES, []).extend(pending_new)
|
||||
|
||||
|
||||
@event.listens_for(AsyncSession.sync_session_class, "after_rollback")
|
||||
def _after_rollback(session: Any) -> None:
|
||||
session.info.pop(_SESSION_PENDING_NEW, None)
|
||||
if session.in_transaction():
|
||||
return
|
||||
session.info.pop(_SESSION_CREATES, None)
|
||||
session.info.pop(_SESSION_DELETES, None)
|
||||
session.info.pop(_SESSION_UPDATES, None)
|
||||
|
||||
|
||||
def _task_error_handler(task: asyncio.Task[Any]) -> None:
|
||||
if not task.cancelled() and (exc := task.exception()):
|
||||
_logger.error(_CALLBACK_ERROR_MSG, exc_info=exc)
|
||||
async def _invoke_callback(
|
||||
fn: Callable[..., Any],
|
||||
obj: Any,
|
||||
event_type: ModelEvent,
|
||||
changes: dict[str, dict[str, Any]] | None,
|
||||
) -> None:
|
||||
"""Call *fn* and await the result if it is awaitable."""
|
||||
result = fn(obj, event_type, changes)
|
||||
if inspect.isawaitable(result):
|
||||
await result
|
||||
|
||||
|
||||
def _call_callback(loop: asyncio.AbstractEventLoop, fn: Any, *args: Any) -> None:
|
||||
"""Dispatch *fn* with *args*, handling both sync and async callables."""
|
||||
try:
|
||||
result = fn(*args)
|
||||
except Exception as exc:
|
||||
_logger.error(_CALLBACK_ERROR_MSG, exc_info=exc)
|
||||
return
|
||||
if asyncio.iscoroutine(result):
|
||||
task = loop.create_task(result)
|
||||
task.add_done_callback(_task_error_handler)
|
||||
class EventSession(AsyncSession):
|
||||
"""AsyncSession subclass that dispatches lifecycle callbacks after commit."""
|
||||
|
||||
async def commit(self) -> None: # noqa: C901
|
||||
await super().commit()
|
||||
|
||||
@event.listens_for(AsyncSession.sync_session_class, "after_commit")
|
||||
def _after_commit(session: Any) -> None:
|
||||
creates: list[Any] = session.info.pop(_SESSION_CREATES, [])
|
||||
deletes: list[Any] = session.info.pop(_SESSION_DELETES, [])
|
||||
field_changes: dict[int, tuple[Any, dict[str, dict[str, Any]]]] = session.info.pop(
|
||||
_SESSION_UPDATES, {}
|
||||
)
|
||||
creates: list[Any] = self.info.pop(_SESSION_CREATES, [])
|
||||
deletes: list[tuple[Any, dict[str, Any]]] = self.info.pop(_SESSION_DELETES, [])
|
||||
field_changes: dict[int, tuple[Any, dict[str, dict[str, Any]]]] = self.info.pop(
|
||||
_SESSION_UPDATES, {}
|
||||
)
|
||||
|
||||
if not creates and not deletes and not field_changes:
|
||||
return
|
||||
if not creates and not deletes and not field_changes:
|
||||
return
|
||||
|
||||
try:
|
||||
loop = asyncio.get_running_loop()
|
||||
except RuntimeError:
|
||||
return
|
||||
# Suppress transient objects (created + deleted in same transaction).
|
||||
if creates and deletes:
|
||||
created_ids = {id(o) for o in creates}
|
||||
deleted_ids = {id(o) for o, _ in deletes}
|
||||
transient_ids = created_ids & deleted_ids
|
||||
if transient_ids:
|
||||
creates = [o for o in creates if id(o) not in transient_ids]
|
||||
deletes = [(o, s) for o, s in deletes if id(o) not in transient_ids]
|
||||
field_changes = {
|
||||
k: v for k, v in field_changes.items() if k not in transient_ids
|
||||
}
|
||||
|
||||
for obj in creates:
|
||||
_call_callback(loop, obj.on_create)
|
||||
# Suppress updates for deleted objects (row is gone, refresh would fail).
|
||||
if deletes and field_changes:
|
||||
deleted_ids = {id(o) for o, _ in deletes}
|
||||
field_changes = {
|
||||
k: v for k, v in field_changes.items() if k not in deleted_ids
|
||||
}
|
||||
|
||||
for obj in deletes:
|
||||
_call_callback(loop, obj.on_delete)
|
||||
# Suppress updates for newly created objects (CREATE-only semantics).
|
||||
if creates and field_changes:
|
||||
create_ids = {id(o) for o in creates}
|
||||
field_changes = {
|
||||
k: v for k, v in field_changes.items() if k not in create_ids
|
||||
}
|
||||
|
||||
for obj, changes in field_changes.values():
|
||||
_call_callback(loop, obj.on_update, changes)
|
||||
# Dispatch CREATE callbacks.
|
||||
for obj in creates:
|
||||
try:
|
||||
state = sa_inspect(obj, raiseerr=False)
|
||||
if (
|
||||
state is None or state.detached or state.transient
|
||||
): # pragma: no cover
|
||||
continue
|
||||
await self.refresh(obj)
|
||||
for handler in _get_handlers(type(obj), ModelEvent.CREATE):
|
||||
await _invoke_callback(handler, obj, ModelEvent.CREATE, None)
|
||||
except Exception as exc:
|
||||
_logger.error(_CALLBACK_ERROR_MSG, exc_info=exc)
|
||||
|
||||
# Dispatch DELETE callbacks (restore snapshot; row is gone).
|
||||
for obj, snapshot in deletes:
|
||||
try:
|
||||
for key, value in snapshot.items():
|
||||
_sa_set_committed_value(obj, key, value)
|
||||
for handler in _get_handlers(type(obj), ModelEvent.DELETE):
|
||||
await _invoke_callback(handler, obj, ModelEvent.DELETE, None)
|
||||
except Exception as exc:
|
||||
_logger.error(_CALLBACK_ERROR_MSG, exc_info=exc)
|
||||
|
||||
class WatchedFieldsMixin:
|
||||
"""Mixin that enables lifecycle callbacks for SQLAlchemy models."""
|
||||
# Dispatch UPDATE callbacks.
|
||||
for obj, changes in field_changes.values():
|
||||
try:
|
||||
state = sa_inspect(obj, raiseerr=False)
|
||||
if (
|
||||
state is None or state.detached or state.transient
|
||||
): # pragma: no cover
|
||||
continue
|
||||
await self.refresh(obj)
|
||||
for handler in _get_handlers(type(obj), ModelEvent.UPDATE):
|
||||
await _invoke_callback(handler, obj, ModelEvent.UPDATE, changes)
|
||||
except Exception as exc:
|
||||
_logger.error(_CALLBACK_ERROR_MSG, exc_info=exc)
|
||||
|
||||
def on_event(
|
||||
self, event: ModelEvent, changes: dict[str, dict[str, Any]] | None = None
|
||||
) -> Awaitable[None] | None:
|
||||
"""Catch-all callback fired for every lifecycle event.
|
||||
|
||||
Args:
|
||||
event: The event type (:attr:`ModelEvent.CREATE`, :attr:`ModelEvent.DELETE`,
|
||||
or :attr:`ModelEvent.UPDATE`).
|
||||
changes: Field changes for :attr:`ModelEvent.UPDATE`, ``None`` otherwise.
|
||||
"""
|
||||
|
||||
def on_create(self) -> Awaitable[None] | None:
|
||||
"""Called after INSERT commit."""
|
||||
return self.on_event(ModelEvent.CREATE)
|
||||
|
||||
def on_delete(self) -> Awaitable[None] | None:
|
||||
"""Called after DELETE commit."""
|
||||
return self.on_event(ModelEvent.DELETE)
|
||||
|
||||
def on_update(self, changes: dict[str, dict[str, Any]]) -> Awaitable[None] | None:
|
||||
"""Called after UPDATE commit when watched fields change."""
|
||||
return self.on_event(ModelEvent.UPDATE, changes=changes)
|
||||
async def rollback(self) -> None:
|
||||
await super().rollback()
|
||||
self.info.pop(_SESSION_CREATES, None)
|
||||
self.info.pop(_SESSION_DELETES, None)
|
||||
self.info.pop(_SESSION_UPDATES, None)
|
||||
|
||||
@@ -1,12 +1,12 @@
"""Pytest helper utilities for FastAPI testing."""

import os
import warnings
from collections.abc import AsyncGenerator, Callable
from contextlib import asynccontextmanager
from typing import Any

from httpx import ASGITransport, AsyncClient
from sqlalchemy import text
from sqlalchemy.engine import make_url
from sqlalchemy.ext.asyncio import (
AsyncSession,
@@ -15,33 +15,8 @@ from sqlalchemy.ext.asyncio import (
)
from sqlalchemy.orm import DeclarativeBase

from sqlalchemy import text

from ..db import (
cleanup_tables as _cleanup_tables,
create_database,
create_db_context,
)


async def cleanup_tables(
session: AsyncSession,
base: type[DeclarativeBase],
) -> None:
"""Truncate all tables for fast between-test cleanup.

.. deprecated::
Import ``cleanup_tables`` from ``fastapi_toolsets.db`` instead.
This re-export will be removed in v3.0.0.
"""
warnings.warn(
"Importing cleanup_tables from fastapi_toolsets.pytest is deprecated "
"and will be removed in v3.0.0. "
"Use 'from fastapi_toolsets.db import cleanup_tables' instead.",
DeprecationWarning,
stacklevel=2,
)
await _cleanup_tables(session=session, base=base)
from ..db import cleanup_tables, create_database
from ..models.watched import EventSession


def _get_xdist_worker(default_test_db: str) -> str:
@@ -129,7 +104,8 @@ async def create_worker_database(
worker_url = worker_database_url(
database_url=database_url, default_test_db=default_test_db
)
worker_db_name: str = make_url(worker_url).database # type: ignore[assignment]
worker_db_name = make_url(worker_url).database
assert worker_db_name is not None

engine = create_async_engine(database_url, isolation_level="AUTOCOMMIT")
try:

@@ -268,15 +244,14 @@ async def create_db_session(
async with engine.begin() as conn:
await conn.run_sync(base.metadata.create_all)

# Create session using existing db context utility
session_maker = async_sessionmaker(engine, expire_on_commit=expire_on_commit)
get_session = create_db_context(session_maker)

async with get_session() as session:
session_maker = async_sessionmaker(
engine, expire_on_commit=expire_on_commit, class_=EventSession
)
async with session_maker() as session:
yield session

if cleanup:
await cleanup_tables(session, base)
await cleanup_tables(session=session, base=base)

if drop_tables:
async with engine.begin() as conn:
@@ -1,9 +1,10 @@
"""Base Pydantic schemas for API responses."""

import math
from enum import Enum
from typing import Annotated, Any, ClassVar, Generic, Literal, TypeVar, Union

from pydantic import BaseModel, ConfigDict, Field
from pydantic import BaseModel, ConfigDict, Field, computed_field

from .types import DataT

@@ -98,17 +99,29 @@ class OffsetPagination(PydanticBase):
    """Pagination metadata for offset-based list responses.

    Attributes:
        total_count: Total number of items across all pages
        total_count: Total number of items across all pages.
            ``None`` when ``include_total=False``.
        items_per_page: Number of items per page
        page: Current page number (1-indexed)
        has_more: Whether there are more pages
        pages: Total number of pages
    """

    total_count: int
    total_count: int | None
    items_per_page: int
    page: int
    has_more: bool

    @computed_field
    @property
    def pages(self) -> int | None:
        """Total number of pages, or ``None`` when ``total_count`` is unknown."""
        if self.total_count is None:
            return None
        if self.items_per_page == 0:
            return 0
        return math.ceil(self.total_count / self.items_per_page)


class CursorPagination(PydanticBase):
    """Pagination metadata for cursor-based list responses.
@@ -149,21 +162,23 @@ class PaginatedResponse(BaseResponse, Generic[DataT]):
    pagination: OffsetPagination | CursorPagination
    pagination_type: PaginationType | None = None
    filter_attributes: dict[str, list[Any]] | None = None
    search_columns: list[str] | None = None
    order_columns: list[str] | None = None

    _discriminated_union_cache: ClassVar[dict[Any, Any]] = {}

    def __class_getitem__(  # type: ignore[invalid-method-override]
    def __class_getitem__(  # ty:ignore[invalid-method-override]
        cls, item: type[Any] | tuple[type[Any], ...]
    ) -> type[Any]:
        if cls is PaginatedResponse and not isinstance(item, TypeVar):
            cached = cls._discriminated_union_cache.get(item)
            if cached is None:
                cached = Annotated[
                    Union[CursorPaginatedResponse[item], OffsetPaginatedResponse[item]],  # type: ignore[invalid-type-form]
                    Union[CursorPaginatedResponse[item], OffsetPaginatedResponse[item]],  # ty:ignore[invalid-type-form]
                    Field(discriminator="pagination_type"),
                ]
                cls._discriminated_union_cache[item] = cached
            return cached  # type: ignore[invalid-return-type]
            return cached  # ty:ignore[invalid-return-type]
        return super().__class_getitem__(item)

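A quick illustration of the ``pages`` computed field added above (the values are arbitrary)::

    p = OffsetPagination(total_count=42, items_per_page=10, page=1, has_more=True)
    assert p.pages == 5  # math.ceil(42 / 10)

    p = OffsetPagination(total_count=None, items_per_page=10, page=1, has_more=True)
    assert p.pages is None  # total unknown when include_total=False
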
@@ -1,24 +0,0 @@
"""Authentication helpers for FastAPI using Security()."""

from .abc import AuthSource
from .oauth import (
    oauth_build_authorization_redirect,
    oauth_decode_state,
    oauth_encode_state,
    oauth_fetch_userinfo,
    oauth_resolve_provider_urls,
)
from .sources import APIKeyHeaderAuth, BearerTokenAuth, CookieAuth, MultiAuth

__all__ = [
    "APIKeyHeaderAuth",
    "AuthSource",
    "BearerTokenAuth",
    "CookieAuth",
    "MultiAuth",
    "oauth_build_authorization_redirect",
    "oauth_decode_state",
    "oauth_encode_state",
    "oauth_fetch_userinfo",
    "oauth_resolve_provider_urls",
]
@@ -1,53 +0,0 @@
"""Abstract base class for authentication sources."""

import inspect
from abc import ABC, abstractmethod
from typing import Any, Callable

from fastapi import Request
from fastapi.security import SecurityScopes

from fastapi_toolsets.exceptions import UnauthorizedError


def _ensure_async(fn: Callable[..., Any]) -> Callable[..., Any]:
    """Wrap *fn* so it can always be awaited, caching the coroutine check at init time."""
    if inspect.iscoroutinefunction(fn):
        return fn

    async def wrapper(*args: Any, **kwargs: Any) -> Any:
        return fn(*args, **kwargs)

    return wrapper


class AuthSource(ABC):
    """Abstract base class for authentication sources."""

    def __init__(self) -> None:
        """Set up the default FastAPI dependency signature."""
        source = self

        async def _call(
            request: Request,
            security_scopes: SecurityScopes,  # noqa: ARG001
        ) -> Any:
            credential = await source.extract(request)
            if credential is None:
                raise UnauthorizedError()
            return await source.authenticate(credential)

        self._call_fn: Callable[..., Any] = _call
        self.__signature__ = inspect.signature(_call)

    @abstractmethod
    async def extract(self, request: Request) -> str | None:
        """Extract the raw credential from the request without validating."""

    @abstractmethod
    async def authenticate(self, credential: str) -> Any:
        """Validate a credential and return the authenticated identity."""

    async def __call__(self, **kwargs: Any) -> Any:
        """FastAPI dependency dispatch."""
        return await self._call_fn(**kwargs)
@@ -1,140 +0,0 @@
"""OAuth 2.0 / OIDC helper utilities."""

import base64
from typing import Any
from urllib.parse import urlencode

import httpx
from fastapi.responses import RedirectResponse

_discovery_cache: dict[str, dict] = {}


async def oauth_resolve_provider_urls(
    discovery_url: str,
) -> tuple[str, str, str | None]:
    """Fetch the OIDC discovery document and return endpoint URLs.

    Args:
        discovery_url: URL of the provider's ``/.well-known/openid-configuration``.

    Returns:
        A ``(authorization_url, token_url, userinfo_url)`` tuple.
        *userinfo_url* is ``None`` when the provider does not advertise one.
    """
    if discovery_url not in _discovery_cache:
        async with httpx.AsyncClient() as client:
            resp = await client.get(discovery_url)
            resp.raise_for_status()
            _discovery_cache[discovery_url] = resp.json()
    cfg = _discovery_cache[discovery_url]
    return (
        cfg["authorization_endpoint"],
        cfg["token_endpoint"],
        cfg.get("userinfo_endpoint"),
    )


async def oauth_fetch_userinfo(
    *,
    token_url: str,
    userinfo_url: str,
    code: str,
    client_id: str,
    client_secret: str,
    redirect_uri: str,
) -> dict[str, Any]:
    """Exchange an authorization code for tokens and return the userinfo payload.

    Performs the two-step OAuth 2.0 / OIDC token exchange:

    1. POSTs the authorization *code* to *token_url* to obtain an access token.
    2. GETs *userinfo_url* using that access token as a Bearer credential.

    Args:
        token_url: Provider's token endpoint.
        userinfo_url: Provider's userinfo endpoint.
        code: Authorization code received from the provider's callback.
        client_id: OAuth application client ID.
        client_secret: OAuth application client secret.
        redirect_uri: Redirect URI that was used in the authorization request.

    Returns:
        The JSON payload returned by the userinfo endpoint as a plain ``dict``.
    """
    async with httpx.AsyncClient() as client:
        token_resp = await client.post(
            token_url,
            data={
                "grant_type": "authorization_code",
                "code": code,
                "client_id": client_id,
                "client_secret": client_secret,
                "redirect_uri": redirect_uri,
            },
            headers={"Accept": "application/json"},
        )
        token_resp.raise_for_status()
        access_token = token_resp.json()["access_token"]

        userinfo_resp = await client.get(
            userinfo_url,
            headers={"Authorization": f"Bearer {access_token}"},
        )
        userinfo_resp.raise_for_status()
        return userinfo_resp.json()


def oauth_build_authorization_redirect(
    authorization_url: str,
    *,
    client_id: str,
    scopes: str,
    redirect_uri: str,
    destination: str,
) -> RedirectResponse:
    """Return an OAuth 2.0 authorization ``RedirectResponse``.

    Args:
        authorization_url: Provider's authorization endpoint.
        client_id: OAuth application client ID.
        scopes: Space-separated list of requested scopes.
        redirect_uri: URI the provider should redirect back to after authorization.
        destination: URL the user should be sent to after the full OAuth flow
            completes (encoded as ``state``).

    Returns:
        A :class:`~fastapi.responses.RedirectResponse` to the provider's
        authorization page.
    """
    params = urlencode(
        {
            "client_id": client_id,
            "response_type": "code",
            "scope": scopes,
            "redirect_uri": redirect_uri,
            "state": oauth_encode_state(destination),
        }
    )
    return RedirectResponse(f"{authorization_url}?{params}")


def oauth_encode_state(url: str) -> str:
    """Base64url-encode a URL to embed as an OAuth ``state`` parameter."""
    return base64.urlsafe_b64encode(url.encode()).decode()


def oauth_decode_state(state: str | None, *, fallback: str) -> str:
    """Decode a base64url OAuth ``state`` parameter.

    Handles missing padding (some providers strip ``=``).
    Returns *fallback* if *state* is absent, the literal string ``"null"``,
    or cannot be decoded.
    """
    if not state or state == "null":
        return fallback
    try:
        padded = state + "=" * (4 - len(state) % 4)
        return base64.urlsafe_b64decode(padded).decode()
    except Exception:
        return fallback
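A hedged sketch of how these removed helpers composed in a login/callback pair; the route paths, the ``DISCOVERY_URL``/``CLIENT_ID``/``CLIENT_SECRET``/``REDIRECT_URI`` settings, and the user handling are placeholders, not part of this repository::

    @app.get("/auth/login")
    async def login(next: str = "/") -> RedirectResponse:
        auth_url, _token_url, _userinfo_url = await oauth_resolve_provider_urls(DISCOVERY_URL)
        return oauth_build_authorization_redirect(
            auth_url,
            client_id=CLIENT_ID,
            scopes="openid email profile",
            redirect_uri=REDIRECT_URI,
            destination=next,
        )

    @app.get("/auth/callback")
    async def callback(code: str, state: str | None = None) -> RedirectResponse:
        _auth_url, token_url, userinfo_url = await oauth_resolve_provider_urls(DISCOVERY_URL)
        userinfo = await oauth_fetch_userinfo(
            token_url=token_url,
            userinfo_url=userinfo_url or "",
            code=code,
            client_id=CLIENT_ID,
            client_secret=CLIENT_SECRET,
            redirect_uri=REDIRECT_URI,
        )
        # create or update the local user from `userinfo` here
        return RedirectResponse(oauth_decode_state(state, fallback="/"))
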
@@ -1,8 +0,0 @@
"""Built-in authentication source implementations."""

from .header import APIKeyHeaderAuth
from .bearer import BearerTokenAuth
from .cookie import CookieAuth
from .multi import MultiAuth

__all__ = ["APIKeyHeaderAuth", "BearerTokenAuth", "CookieAuth", "MultiAuth"]
@@ -1,120 +0,0 @@
"""Bearer token authentication source."""

import inspect
import secrets
from typing import Annotated, Any, Callable

from fastapi import Depends
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer, SecurityScopes

from fastapi_toolsets.exceptions import UnauthorizedError

from ..abc import AuthSource, _ensure_async


class BearerTokenAuth(AuthSource):
    """Bearer token authentication source.

    Wraps :class:`fastapi.security.HTTPBearer` for OpenAPI documentation.
    The validator is called as ``await validator(credential, **kwargs)``
    where ``kwargs`` are the extra keyword arguments provided at instantiation.

    Args:
        validator: Sync or async callable that receives the credential and any
            extra keyword arguments, and returns the authenticated identity
            (e.g. a ``User`` model). Should raise
            :class:`~fastapi_toolsets.exceptions.UnauthorizedError` on failure.
        prefix: Optional token prefix (e.g. ``"user_"``). If set, only tokens
            whose value starts with this prefix are matched. The prefix is
            **kept** in the value passed to the validator — store and compare
            tokens with their prefix included. Use :meth:`generate_token` to
            create correctly-prefixed tokens. This enables multiple
            ``BearerTokenAuth`` instances in the same app (e.g. ``"user_"``
            for user tokens, ``"org_"`` for org tokens).
        **kwargs: Extra keyword arguments forwarded to the validator on every
            call (e.g. ``role=Role.ADMIN``).
    """

    def __init__(
        self,
        validator: Callable[..., Any],
        *,
        prefix: str | None = None,
        **kwargs: Any,
    ) -> None:
        self._validator = _ensure_async(validator)
        self._prefix = prefix
        self._kwargs = kwargs
        self._scheme = HTTPBearer(auto_error=False)

        async def _call(
            security_scopes: SecurityScopes,  # noqa: ARG001
            credentials: Annotated[
                HTTPAuthorizationCredentials | None, Depends(self._scheme)
            ] = None,
        ) -> Any:
            if credentials is None:
                raise UnauthorizedError()
            return await self._validate(credentials.credentials)

        self._call_fn = _call
        self.__signature__ = inspect.signature(_call)

    async def _validate(self, token: str) -> Any:
        """Check prefix and call the validator."""
        if self._prefix is not None and not token.startswith(self._prefix):
            raise UnauthorizedError()
        return await self._validator(token, **self._kwargs)

    async def extract(self, request: Any) -> str | None:
        """Extract the raw credential from the request without validating.

        Returns ``None`` if no ``Authorization: Bearer`` header is present,
        the token is empty, or the token does not match the configured prefix.
        The prefix is included in the returned value.
        """
        auth = request.headers.get("Authorization", "")
        if not auth.startswith("Bearer "):
            return None
        token = auth[7:]
        if not token:
            return None
        if self._prefix is not None and not token.startswith(self._prefix):
            return None
        return token

    async def authenticate(self, credential: str) -> Any:
        """Validate a credential and return the identity.

        Calls ``await validator(credential, **kwargs)`` where ``kwargs`` are
        the extra keyword arguments provided at instantiation.
        """
        return await self._validate(credential)

    def require(self, **kwargs: Any) -> "BearerTokenAuth":
        """Return a new instance with additional (or overriding) validator kwargs."""
        return BearerTokenAuth(
            self._validator,
            prefix=self._prefix,
            **{**self._kwargs, **kwargs},
        )

    def generate_token(self, nbytes: int = 32) -> str:
        """Generate a secure random token for this auth source.

        Returns a URL-safe random token. If a prefix is configured it is
        prepended — the returned value is what you store in your database
        and return to the client as-is.

        Args:
            nbytes: Number of random bytes before base64 encoding. The
                resulting string is ``ceil(nbytes * 4 / 3)`` characters
                (43 chars for the default 32 bytes). Defaults to 32.

        Returns:
            A ready-to-use token string (e.g. ``"user_Xk3..."``).
        """
        token = secrets.token_urlsafe(nbytes)
        if self._prefix is not None:
            return f"{self._prefix}{token}"
        return token
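A short sketch of the prefix workflow described above; ``find_user_by_token``, the ``User`` type and the route are placeholders::

    async def verify_user(token: str) -> "User":
        # the stored token still carries the "user_" prefix
        user = await find_user_by_token(token)
        if user is None:
            raise UnauthorizedError()
        return user

    user_bearer = BearerTokenAuth(verify_user, prefix="user_")

    # issue a token: generate_token() prepends the prefix; store and return it as-is
    token = user_bearer.generate_token()  # e.g. "user_Xk3..."

    @app.get("/me")
    async def me(user=Security(user_bearer)):
        return user
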
@@ -1,139 +0,0 @@
"""Cookie-based authentication source."""

import base64
import hashlib
import hmac
import inspect
import json
import time
from typing import Annotated, Any, Callable

from fastapi import Depends, Request, Response
from fastapi.security import APIKeyCookie, SecurityScopes

from fastapi_toolsets.exceptions import UnauthorizedError

from ..abc import AuthSource, _ensure_async


class CookieAuth(AuthSource):
    """Cookie-based authentication source.

    Wraps :class:`fastapi.security.APIKeyCookie` for OpenAPI documentation.
    Optionally signs the cookie with HMAC-SHA256 to provide stateless, tamper-
    proof sessions without any database entry.

    Args:
        name: Cookie name.
        validator: Sync or async callable that receives the cookie value
            (plain, after signature verification when ``secret_key`` is set)
            and any extra keyword arguments, and returns the authenticated
            identity.
        secret_key: When provided, the cookie is HMAC-SHA256 signed.
            :meth:`set_cookie` embeds an expiry and signs the payload;
            :meth:`extract` verifies the signature and expiry before handing
            the plain value to the validator. When ``None`` (default), the raw
            cookie value is passed to the validator as-is.
        ttl: Cookie lifetime in seconds (default 24 h). Only used when
            ``secret_key`` is set.
        **kwargs: Extra keyword arguments forwarded to the validator on every
            call (e.g. ``role=Role.ADMIN``).
    """

    def __init__(
        self,
        name: str,
        validator: Callable[..., Any],
        *,
        secret_key: str | None = None,
        ttl: int = 86400,
        **kwargs: Any,
    ) -> None:
        self._name = name
        self._validator = _ensure_async(validator)
        self._secret_key = secret_key
        self._ttl = ttl
        self._kwargs = kwargs
        self._scheme = APIKeyCookie(name=name, auto_error=False)

        async def _call(
            security_scopes: SecurityScopes,  # noqa: ARG001
            value: Annotated[str | None, Depends(self._scheme)] = None,
        ) -> Any:
            if value is None:
                raise UnauthorizedError()
            plain = self._verify(value)
            return await self._validator(plain, **self._kwargs)

        self._call_fn = _call
        self.__signature__ = inspect.signature(_call)

    def _hmac(self, data: str) -> str:
        if self._secret_key is None:
            raise RuntimeError("_hmac called without secret_key configured")
        return hmac.new(
            self._secret_key.encode(), data.encode(), hashlib.sha256
        ).hexdigest()

    def _sign(self, value: str) -> str:
        data = base64.urlsafe_b64encode(
            json.dumps({"v": value, "exp": int(time.time()) + self._ttl}).encode()
        ).decode()
        return f"{data}.{self._hmac(data)}"

    def _verify(self, cookie_value: str) -> str:
        """Return the plain value, verifying HMAC + expiry when signed."""
        if not self._secret_key:
            return cookie_value

        try:
            data, sig = cookie_value.rsplit(".", 1)
        except ValueError:
            raise UnauthorizedError()

        if not hmac.compare_digest(self._hmac(data), sig):
            raise UnauthorizedError()

        try:
            payload = json.loads(base64.urlsafe_b64decode(data))
            value: str = payload["v"]
            exp: int = payload["exp"]
        except Exception:
            raise UnauthorizedError()

        if exp < int(time.time()):
            raise UnauthorizedError()

        return value

    async def extract(self, request: Request) -> str | None:
        return request.cookies.get(self._name)

    async def authenticate(self, credential: str) -> Any:
        plain = self._verify(credential)
        return await self._validator(plain, **self._kwargs)

    def require(self, **kwargs: Any) -> "CookieAuth":
        """Return a new instance with additional (or overriding) validator kwargs."""
        return CookieAuth(
            self._name,
            self._validator,
            secret_key=self._secret_key,
            ttl=self._ttl,
            **{**self._kwargs, **kwargs},
        )

    def set_cookie(self, response: Response, value: str) -> None:
        """Attach the cookie to *response*, signing it when ``secret_key`` is set."""
        cookie_value = self._sign(value) if self._secret_key else value
        response.set_cookie(
            self._name,
            cookie_value,
            httponly=True,
            samesite="lax",
            max_age=self._ttl,
        )

    def delete_cookie(self, response: Response) -> None:
        """Clear the session cookie (logout)."""
        response.delete_cookie(self._name, httponly=True, samesite="lax")
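And a sketch of signed-cookie sessions with ``CookieAuth``; ``SECRET``, ``verify_session`` and the ``"user:42"`` payload are illustrative only::

    session_auth = CookieAuth("session", verify_session, secret_key=SECRET, ttl=3600)

    @app.post("/login")
    async def login(response: Response):
        # the value is wrapped with an expiry and HMAC-signed before being set
        session_auth.set_cookie(response, value="user:42")
        return {"ok": True}

    @app.post("/logout")
    async def logout(response: Response):
        session_auth.delete_cookie(response)
        return {"ok": True}

    @app.get("/me")
    async def me(identity=Security(session_auth)):
        return identity
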
@@ -1,67 +0,0 @@
"""API key header authentication source."""

import inspect
from typing import Annotated, Any, Callable

from fastapi import Depends, Request
from fastapi.security import APIKeyHeader, SecurityScopes

from fastapi_toolsets.exceptions import UnauthorizedError

from ..abc import AuthSource, _ensure_async


class APIKeyHeaderAuth(AuthSource):
    """API key header authentication source.

    Wraps :class:`fastapi.security.APIKeyHeader` for OpenAPI documentation.
    The validator is called as ``await validator(api_key, **kwargs)``
    where ``kwargs`` are the extra keyword arguments provided at instantiation.

    Args:
        name: HTTP header name that carries the API key (e.g. ``"X-API-Key"``).
        validator: Sync or async callable that receives the API key and any
            extra keyword arguments, and returns the authenticated identity.
            Should raise :class:`~fastapi_toolsets.exceptions.UnauthorizedError`
            on failure.
        **kwargs: Extra keyword arguments forwarded to the validator on every
            call (e.g. ``role=Role.ADMIN``).
    """

    def __init__(
        self,
        name: str,
        validator: Callable[..., Any],
        **kwargs: Any,
    ) -> None:
        self._name = name
        self._validator = _ensure_async(validator)
        self._kwargs = kwargs
        self._scheme = APIKeyHeader(name=name, auto_error=False)

        async def _call(
            security_scopes: SecurityScopes,  # noqa: ARG001
            api_key: Annotated[str | None, Depends(self._scheme)] = None,
        ) -> Any:
            if api_key is None:
                raise UnauthorizedError()
            return await self._validator(api_key, **self._kwargs)

        self._call_fn = _call
        self.__signature__ = inspect.signature(_call)

    async def extract(self, request: Request) -> str | None:
        """Extract the API key from the configured header."""
        return request.headers.get(self._name) or None

    async def authenticate(self, credential: str) -> Any:
        """Validate a credential and return the identity."""
        return await self._validator(credential, **self._kwargs)

    def require(self, **kwargs: Any) -> "APIKeyHeaderAuth":
        """Return a new instance with additional (or overriding) validator kwargs."""
        return APIKeyHeaderAuth(
            self._name,
            self._validator,
            **{**self._kwargs, **kwargs},
        )
@@ -1,119 +0,0 @@
"""MultiAuth: combine multiple authentication sources into a single callable."""

import inspect
from typing import Any, cast

from fastapi import Request
from fastapi.security import SecurityScopes

from fastapi_toolsets.exceptions import UnauthorizedError

from ..abc import AuthSource


class MultiAuth:
    """Combine multiple authentication sources into a single callable.

    Sources are tried in order; the first one whose
    :meth:`~AuthSource.extract` returns a non-``None`` credential wins.
    Its :meth:`~AuthSource.authenticate` is called and the result returned.

    If a credential is found but the validator raises, the exception propagates
    immediately — the remaining sources are **not** tried. This prevents
    silent fallthrough on invalid credentials.

    If no source provides a credential,
    :class:`~fastapi_toolsets.exceptions.UnauthorizedError` is raised.

    The :meth:`~AuthSource.extract` method of each source performs only
    string matching (no I/O), so prefix-based dispatch is essentially free.

    Any :class:`~AuthSource` subclass — including user-defined ones — can be
    passed as a source.

    Args:
        *sources: Auth source instances to try in order.

    Example::

        user_bearer = BearerTokenAuth(verify_user, prefix="user_")
        org_bearer = BearerTokenAuth(verify_org, prefix="org_")
        cookie = CookieAuth("session", verify_session)

        multi = MultiAuth(user_bearer, org_bearer, cookie)

        @app.get("/data")
        async def data_route(user = Security(multi)):
            return user

        # Apply a shared requirement to all sources at once
        @app.get("/admin")
        async def admin_route(user = Security(multi.require(role=Role.ADMIN))):
            return user
    """

    def __init__(self, *sources: AuthSource) -> None:
        self._sources = sources

        async def _call(
            request: Request,
            security_scopes: SecurityScopes,  # noqa: ARG001
            **kwargs: Any,  # noqa: ARG001 — absorbs scheme values injected by FastAPI
        ) -> Any:
            for source in self._sources:
                credential = await source.extract(request)
                if credential is not None:
                    return await source.authenticate(credential)
            raise UnauthorizedError()

        self._call_fn = _call

        # Build a merged signature that includes the security-scheme Depends()
        # parameters from every source so FastAPI registers them in OpenAPI docs.
        seen: set[str] = {"request", "security_scopes"}
        merged: list[inspect.Parameter] = [
            inspect.Parameter(
                "request",
                inspect.Parameter.POSITIONAL_OR_KEYWORD,
                annotation=Request,
            ),
            inspect.Parameter(
                "security_scopes",
                inspect.Parameter.POSITIONAL_OR_KEYWORD,
                annotation=SecurityScopes,
            ),
        ]
        for i, source in enumerate(sources):
            for name, param in inspect.signature(source).parameters.items():
                if name in seen:
                    continue
                merged.append(param.replace(name=f"_s{i}_{name}"))
                seen.add(name)
        self.__signature__ = inspect.Signature(merged, return_annotation=Any)

    async def __call__(self, **kwargs: Any) -> Any:
        return await self._call_fn(**kwargs)

    def require(self, **kwargs: Any) -> "MultiAuth":
        """Return a new :class:`MultiAuth` with kwargs forwarded to each source.

        Calls ``.require(**kwargs)`` on every source that supports it. Sources
        that do not implement ``.require()`` (e.g. custom :class:`~AuthSource`
        subclasses) are passed through unchanged.

        New kwargs are merged over each source's existing kwargs — new values
        win on conflict::

            multi = MultiAuth(bearer, cookie)

            @app.get("/admin")
            async def admin(user = Security(multi.require(role=Role.ADMIN))):
                return user
        """
        new_sources = tuple(
            cast(Any, source).require(**kwargs)
            if hasattr(source, "require")
            else source
            for source in self._sources
        )
        return MultiAuth(*new_sources)
@@ -15,13 +15,14 @@ ModelType = TypeVar("ModelType", bound=DeclarativeBase)
SchemaType = TypeVar("SchemaType", bound=BaseModel)

# CRUD type aliases
JoinType = list[tuple[type[DeclarativeBase], Any]]
JoinType = list[tuple[type[DeclarativeBase] | Any, Any]]
M2MFieldType = Mapping[str, QueryableAttribute[Any]]
OrderByClause = ColumnElement[Any] | QueryableAttribute[Any]

# Search / facet type aliases
# Search / facet / order type aliases
SearchFieldType = InstrumentedAttribute[Any] | tuple[InstrumentedAttribute[Any], ...]
FacetFieldType = SearchFieldType
OrderFieldType = SearchFieldType

# Dependency type aliases
SessionDependency = Callable[[], AsyncGenerator[AsyncSession, None]] | Any

@@ -2,6 +2,7 @@
|
||||
|
||||
import os
|
||||
import uuid
|
||||
from enum import Enum
|
||||
|
||||
import pytest
|
||||
from pydantic import BaseModel
|
||||
@@ -12,13 +13,16 @@ from sqlalchemy import (
|
||||
Column,
|
||||
Date,
|
||||
DateTime,
|
||||
Enum as SAEnum,
|
||||
ForeignKey,
|
||||
Integer,
|
||||
JSON,
|
||||
Numeric,
|
||||
String,
|
||||
Table,
|
||||
Uuid,
|
||||
)
|
||||
from sqlalchemy.dialects.postgresql import ARRAY
|
||||
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
|
||||
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
|
||||
|
||||
@@ -57,6 +61,7 @@ class User(Base):
|
||||
username: Mapped[str] = mapped_column(String(50), unique=True)
|
||||
email: Mapped[str] = mapped_column(String(100), unique=True)
|
||||
is_active: Mapped[bool] = mapped_column(default=True)
|
||||
notes: Mapped[str | None]
|
||||
role_id: Mapped[uuid.UUID | None] = mapped_column(
|
||||
ForeignKey("roles.id"), nullable=True
|
||||
)
|
||||
@@ -136,6 +141,57 @@ class Post(Base):
|
||||
tags: Mapped[list[Tag]] = relationship(secondary=post_tags)
|
||||
|
||||
|
||||
class OrderStatus(int, Enum):
|
||||
"""Integer-backed enum for order status."""
|
||||
|
||||
PENDING = 1
|
||||
PROCESSING = 2
|
||||
SHIPPED = 3
|
||||
CANCELLED = 4
|
||||
|
||||
|
||||
class Color(str, Enum):
|
||||
"""String-backed enum for color."""
|
||||
|
||||
RED = "red"
|
||||
GREEN = "green"
|
||||
BLUE = "blue"
|
||||
|
||||
|
||||
class Order(Base):
|
||||
"""Test model with an IntEnum column (Enum(int, Enum)) and a raw Integer column."""
|
||||
|
||||
__tablename__ = "orders"
|
||||
|
||||
id: Mapped[uuid.UUID] = mapped_column(Uuid, primary_key=True, default=uuid.uuid4)
|
||||
name: Mapped[str] = mapped_column(String(100))
|
||||
status: Mapped[OrderStatus] = mapped_column(SAEnum(OrderStatus))
|
||||
priority: Mapped[int] = mapped_column(Integer)
|
||||
color: Mapped[Color] = mapped_column(SAEnum(Color))
|
||||
|
||||
|
||||
class Transfer(Base):
|
||||
"""Test model with two FKs to the same table (users)."""
|
||||
|
||||
__tablename__ = "transfers"
|
||||
|
||||
id: Mapped[uuid.UUID] = mapped_column(Uuid, primary_key=True, default=uuid.uuid4)
|
||||
amount: Mapped[str] = mapped_column(String(50))
|
||||
sender_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("users.id"))
|
||||
receiver_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("users.id"))
|
||||
|
||||
|
||||
class Article(Base):
|
||||
"""Test article model with ARRAY and JSON columns."""
|
||||
|
||||
__tablename__ = "articles"
|
||||
|
||||
id: Mapped[uuid.UUID] = mapped_column(Uuid, primary_key=True, default=uuid.uuid4)
|
||||
title: Mapped[str] = mapped_column(String(200))
|
||||
labels: Mapped[list[str]] = mapped_column(ARRAY(String))
|
||||
metadata_: Mapped[dict | None] = mapped_column("metadata", JSON, nullable=True)
|
||||
|
||||
|
||||
class RoleCreate(BaseModel):
|
||||
"""Schema for creating a role."""
|
||||
|
||||
@@ -270,6 +326,61 @@ class ProductCreate(BaseModel):
|
||||
price: decimal.Decimal
|
||||
|
||||
|
||||
class ArticleCreate(BaseModel):
|
||||
"""Schema for creating an article."""
|
||||
|
||||
id: uuid.UUID | None = None
|
||||
title: str
|
||||
labels: list[str] = []
|
||||
|
||||
|
||||
class ArticleRead(PydanticBase):
|
||||
"""Schema for reading an article."""
|
||||
|
||||
id: uuid.UUID
|
||||
title: str
|
||||
labels: list[str]
|
||||
|
||||
|
||||
class OrderCreate(BaseModel):
|
||||
"""Schema for creating an order."""
|
||||
|
||||
id: uuid.UUID | None = None
|
||||
name: str
|
||||
status: OrderStatus
|
||||
priority: int = 0
|
||||
color: Color = Color.RED
|
||||
|
||||
|
||||
class OrderRead(PydanticBase):
|
||||
"""Schema for reading an order."""
|
||||
|
||||
id: uuid.UUID
|
||||
name: str
|
||||
status: OrderStatus
|
||||
priority: int
|
||||
color: Color
|
||||
|
||||
|
||||
class TransferCreate(BaseModel):
|
||||
"""Schema for creating a transfer."""
|
||||
|
||||
id: uuid.UUID | None = None
|
||||
amount: str
|
||||
sender_id: uuid.UUID
|
||||
receiver_id: uuid.UUID
|
||||
|
||||
|
||||
class TransferRead(PydanticBase):
|
||||
"""Schema for reading a transfer."""
|
||||
|
||||
id: uuid.UUID
|
||||
amount: str
|
||||
|
||||
|
||||
OrderCrud = CrudFactory(Order)
|
||||
TransferCrud = CrudFactory(Transfer)
|
||||
ArticleCrud = CrudFactory(Article)
|
||||
RoleCrud = CrudFactory(Role)
|
||||
RoleCursorCrud = CrudFactory(Role, cursor_column=Role.id)
|
||||
IntRoleCursorCrud = CrudFactory(IntRole, cursor_column=IntRole.id)
|
||||
@@ -321,30 +432,3 @@ async def db_session(engine):
|
||||
# Drop tables after test
|
||||
async with engine.begin() as conn:
|
||||
await conn.run_sync(Base.metadata.drop_all)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_role_data() -> RoleCreate:
|
||||
"""Sample role creation data."""
|
||||
return RoleCreate(name="admin")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_user_data() -> UserCreate:
|
||||
"""Sample user creation data."""
|
||||
return UserCreate(
|
||||
username="testuser",
|
||||
email="test@example.com",
|
||||
is_active=True,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_post_data() -> PostCreate:
|
||||
"""Sample post creation data."""
|
||||
return PostCreate(
|
||||
title="Test Post",
|
||||
content="Test content",
|
||||
is_published=True,
|
||||
author_id=uuid.uuid4(),
|
||||
)
|
||||
|
||||
@@ -38,6 +38,10 @@ from .conftest import (
|
||||
Tag,
|
||||
TagCreate,
|
||||
TagCrud,
|
||||
Transfer,
|
||||
TransferCreate,
|
||||
TransferCrud,
|
||||
TransferRead,
|
||||
User,
|
||||
UserCreate,
|
||||
UserCrud,
|
||||
@@ -211,6 +215,92 @@ class TestResolveLoadOptions:
|
||||
assert crud._resolve_load_options([]) == []
|
||||
|
||||
|
||||
class TestResolveSearchColumns:
|
||||
"""Tests for _resolve_search_columns logic."""
|
||||
|
||||
def test_returns_none_when_no_searchable_fields(self):
|
||||
"""Returns None when cls.searchable_fields is None and no search_fields passed."""
|
||||
|
||||
class AbstractCrud(AsyncCrud[User]):
|
||||
pass
|
||||
|
||||
assert AbstractCrud._resolve_search_columns(None) is None
|
||||
|
||||
def test_returns_none_when_empty_search_fields_passed(self):
|
||||
"""Returns None when an empty list is passed explicitly."""
|
||||
crud = CrudFactory(User)
|
||||
assert crud._resolve_search_columns([]) is None
|
||||
|
||||
def test_returns_keys_from_class_searchable_fields(self):
|
||||
"""Returns column keys from cls.searchable_fields when no override passed."""
|
||||
crud = CrudFactory(User, searchable_fields=[User.username])
|
||||
result = crud._resolve_search_columns(None)
|
||||
assert result is not None
|
||||
assert "username" in result
|
||||
|
||||
def test_search_fields_override_takes_priority(self):
|
||||
"""Explicit search_fields override cls.searchable_fields."""
|
||||
crud = CrudFactory(User, searchable_fields=[User.username])
|
||||
result = crud._resolve_search_columns([User.email])
|
||||
assert result is not None
|
||||
assert "email" in result
|
||||
assert "username" not in result
|
||||
|
||||
|
||||
class TestResolveOrderColumns:
|
||||
"""Tests for _resolve_order_columns logic."""
|
||||
|
||||
def test_returns_none_when_no_order_fields(self):
|
||||
"""Returns None when cls.order_fields is None and no order_fields passed."""
|
||||
|
||||
class AbstractCrud(AsyncCrud[User]):
|
||||
pass
|
||||
|
||||
assert AbstractCrud._resolve_order_columns(None) is None
|
||||
|
||||
def test_returns_none_when_empty_order_fields_passed(self):
|
||||
"""Returns None when an empty list is passed explicitly."""
|
||||
crud = CrudFactory(User)
|
||||
assert crud._resolve_order_columns([]) is None
|
||||
|
||||
def test_returns_keys_from_class_order_fields(self):
|
||||
"""Returns sorted column keys from cls.order_fields when no override passed."""
|
||||
crud = CrudFactory(User, order_fields=[User.username])
|
||||
result = crud._resolve_order_columns(None)
|
||||
assert result is not None
|
||||
assert "username" in result
|
||||
|
||||
def test_order_fields_override_takes_priority(self):
|
||||
"""Explicit order_fields override cls.order_fields."""
|
||||
crud = CrudFactory(User, order_fields=[User.username])
|
||||
result = crud._resolve_order_columns([User.email])
|
||||
assert result is not None
|
||||
assert "email" in result
|
||||
assert "username" not in result
|
||||
|
||||
def test_returns_sorted_keys(self):
|
||||
"""Keys are returned in sorted order."""
|
||||
crud = CrudFactory(User, order_fields=[User.email, User.username])
|
||||
result = crud._resolve_order_columns(None)
|
||||
assert result is not None
|
||||
assert result == sorted(result)
|
||||
|
||||
def test_relation_tuple_produces_dunder_key(self):
|
||||
"""A (rel, column) tuple produces a 'rel__column' key."""
|
||||
crud = CrudFactory(User, order_fields=[(User.role, Role.name)])
|
||||
result = crud._resolve_order_columns(None)
|
||||
assert result == ["role__name"]
|
||||
|
||||
def test_mixed_flat_and_relation_fields(self):
|
||||
"""Flat and relation fields can be mixed; keys are sorted."""
|
||||
crud = CrudFactory(User, order_fields=[User.username, (User.role, Role.name)])
|
||||
result = crud._resolve_order_columns(None)
|
||||
assert result is not None
|
||||
assert "username" in result
|
||||
assert "role__name" in result
|
||||
assert result == sorted(result)
|
||||
|
||||
|
||||
class TestDefaultLoadOptionsIntegration:
|
||||
"""Integration tests for default_load_options with real DB queries."""
|
||||
|
||||
@@ -290,6 +380,43 @@ class TestDefaultLoadOptionsIntegration:
|
||||
assert result.data[0].role is not None
|
||||
assert result.data[0].role.name == "admin"
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_default_load_options_applied_to_create(
|
||||
self, db_session: AsyncSession
|
||||
):
|
||||
"""default_load_options loads relationships after create()."""
|
||||
UserWithDefaultLoad = CrudFactory(
|
||||
User, default_load_options=[selectinload(User.role)]
|
||||
)
|
||||
role = await RoleCrud.create(db_session, RoleCreate(name="admin"))
|
||||
user = await UserWithDefaultLoad.create(
|
||||
db_session,
|
||||
UserCreate(username="alice", email="alice@test.com", role_id=role.id),
|
||||
)
|
||||
assert user.role is not None
|
||||
assert user.role.name == "admin"
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_default_load_options_applied_to_update(
|
||||
self, db_session: AsyncSession
|
||||
):
|
||||
"""default_load_options loads relationships after update()."""
|
||||
UserWithDefaultLoad = CrudFactory(
|
||||
User, default_load_options=[selectinload(User.role)]
|
||||
)
|
||||
role = await RoleCrud.create(db_session, RoleCreate(name="admin"))
|
||||
user = await UserCrud.create(
|
||||
db_session,
|
||||
UserCreate(username="alice", email="alice@test.com"),
|
||||
)
|
||||
updated = await UserWithDefaultLoad.update(
|
||||
db_session,
|
||||
UserUpdate(role_id=role.id),
|
||||
filters=[User.id == user.id],
|
||||
)
|
||||
assert updated.role is not None
|
||||
assert updated.role.name == "admin"
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_load_options_overrides_default_load_options(
|
||||
self, db_session: AsyncSession
|
||||
@@ -1250,6 +1377,128 @@ class TestCrudJoins:
|
||||
assert users[0].username == "multi_join"
|
||||
|
||||
|
||||
class TestCrudAliasedJoins:
|
||||
"""Tests for CRUD operations with aliased joins (same table joined twice)."""
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_get_multi_with_aliased_joins(self, db_session: AsyncSession):
|
||||
"""Aliased joins allow joining the same table twice."""
|
||||
from sqlalchemy.orm import aliased
|
||||
|
||||
alice = await UserCrud.create(
|
||||
db_session, UserCreate(username="alice", email="alice@test.com")
|
||||
)
|
||||
bob = await UserCrud.create(
|
||||
db_session, UserCreate(username="bob", email="bob@test.com")
|
||||
)
|
||||
await TransferCrud.create(
|
||||
db_session,
|
||||
TransferCreate(amount="100", sender_id=alice.id, receiver_id=bob.id),
|
||||
)
|
||||
|
||||
Sender = aliased(User)
|
||||
Receiver = aliased(User)
|
||||
|
||||
results = await TransferCrud.get_multi(
|
||||
db_session,
|
||||
joins=[
|
||||
(Sender, Transfer.sender_id == Sender.id),
|
||||
(Receiver, Transfer.receiver_id == Receiver.id),
|
||||
],
|
||||
filters=[Sender.username == "alice", Receiver.username == "bob"],
|
||||
)
|
||||
assert len(results) == 1
|
||||
assert results[0].amount == "100"
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_get_multi_aliased_no_match(self, db_session: AsyncSession):
|
||||
"""Aliased joins correctly filter out non-matching rows."""
|
||||
from sqlalchemy.orm import aliased
|
||||
|
||||
alice = await UserCrud.create(
|
||||
db_session, UserCreate(username="alice", email="alice@test.com")
|
||||
)
|
||||
bob = await UserCrud.create(
|
||||
db_session, UserCreate(username="bob", email="bob@test.com")
|
||||
)
|
||||
await TransferCrud.create(
|
||||
db_session,
|
||||
TransferCreate(amount="100", sender_id=alice.id, receiver_id=bob.id),
|
||||
)
|
||||
|
||||
Sender = aliased(User)
|
||||
Receiver = aliased(User)
|
||||
|
||||
# bob is receiver, not sender — should return nothing
|
||||
results = await TransferCrud.get_multi(
|
||||
db_session,
|
||||
joins=[
|
||||
(Sender, Transfer.sender_id == Sender.id),
|
||||
(Receiver, Transfer.receiver_id == Receiver.id),
|
||||
],
|
||||
filters=[Sender.username == "bob", Receiver.username == "alice"],
|
||||
)
|
||||
assert len(results) == 0
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_paginate_with_aliased_joins(self, db_session: AsyncSession):
|
||||
"""Aliased joins work with offset_paginate."""
|
||||
from sqlalchemy.orm import aliased
|
||||
|
||||
alice = await UserCrud.create(
|
||||
db_session, UserCreate(username="alice", email="alice@test.com")
|
||||
)
|
||||
bob = await UserCrud.create(
|
||||
db_session, UserCreate(username="bob", email="bob@test.com")
|
||||
)
|
||||
await TransferCrud.create(
|
||||
db_session,
|
||||
TransferCreate(amount="50", sender_id=alice.id, receiver_id=bob.id),
|
||||
)
|
||||
await TransferCrud.create(
|
||||
db_session,
|
||||
TransferCreate(amount="75", sender_id=bob.id, receiver_id=alice.id),
|
||||
)
|
||||
|
||||
Sender = aliased(User)
|
||||
result = await TransferCrud.offset_paginate(
|
||||
db_session,
|
||||
joins=[(Sender, Transfer.sender_id == Sender.id)],
|
||||
filters=[Sender.username == "alice"],
|
||||
schema=TransferRead,
|
||||
)
|
||||
assert result.pagination.total_count == 1
|
||||
assert result.data[0].amount == "50"
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_count_with_aliased_join(self, db_session: AsyncSession):
|
||||
"""Aliased joins work with count."""
|
||||
from sqlalchemy.orm import aliased
|
||||
|
||||
alice = await UserCrud.create(
|
||||
db_session, UserCreate(username="alice", email="alice@test.com")
|
||||
)
|
||||
bob = await UserCrud.create(
|
||||
db_session, UserCreate(username="bob", email="bob@test.com")
|
||||
)
|
||||
await TransferCrud.create(
|
||||
db_session,
|
||||
TransferCreate(amount="10", sender_id=alice.id, receiver_id=bob.id),
|
||||
)
|
||||
await TransferCrud.create(
|
||||
db_session,
|
||||
TransferCreate(amount="20", sender_id=alice.id, receiver_id=bob.id),
|
||||
)
|
||||
|
||||
Sender = aliased(User)
|
||||
count = await TransferCrud.count(
|
||||
db_session,
|
||||
joins=[(Sender, Transfer.sender_id == Sender.id)],
|
||||
filters=[Sender.username == "alice"],
|
||||
)
|
||||
assert count == 2
|
||||
|
||||
|
||||
class TestCrudFactoryM2M:
|
||||
"""Tests for CrudFactory with m2m_fields parameter."""
|
||||
|
||||
@@ -1759,6 +2008,52 @@ class TestSchemaResponse:
|
||||
assert result.data[0].username == "pg_user"
|
||||
assert not hasattr(result.data[0], "email")
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_include_total_false_skips_count(self, db_session: AsyncSession):
|
||||
"""offset_paginate with include_total=False returns total_count=None."""
|
||||
from fastapi_toolsets.schemas import OffsetPagination
|
||||
|
||||
for i in range(5):
|
||||
await RoleCrud.create(db_session, RoleCreate(name=f"role{i:02d}"))
|
||||
|
||||
result = await RoleCrud.offset_paginate(
|
||||
db_session, items_per_page=10, include_total=False, schema=RoleRead
|
||||
)
|
||||
|
||||
assert isinstance(result.pagination, OffsetPagination)
|
||||
assert result.pagination.total_count is None
|
||||
assert len(result.data) == 5
|
||||
assert result.pagination.has_more is False
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_include_total_false_has_more_true(self, db_session: AsyncSession):
|
||||
"""offset_paginate with include_total=False sets has_more via extra-row probe."""
|
||||
for i in range(15):
|
||||
await RoleCrud.create(db_session, RoleCreate(name=f"role{i:02d}"))
|
||||
|
||||
result = await RoleCrud.offset_paginate(
|
||||
db_session, items_per_page=10, include_total=False, schema=RoleRead
|
||||
)
|
||||
|
||||
assert result.pagination.total_count is None
|
||||
assert result.pagination.has_more is True
|
||||
assert len(result.data) == 10
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_include_total_false_exact_page_boundary(
|
||||
self, db_session: AsyncSession
|
||||
):
|
||||
"""offset_paginate with include_total=False: has_more=False when items == page size."""
|
||||
for i in range(10):
|
||||
await RoleCrud.create(db_session, RoleCreate(name=f"role{i:02d}"))
|
||||
|
||||
result = await RoleCrud.offset_paginate(
|
||||
db_session, items_per_page=10, include_total=False, schema=RoleRead
|
||||
)
|
||||
|
||||
assert result.pagination.has_more is False
|
||||
assert len(result.data) == 10
|
||||
|
||||
|
||||
class TestCursorPaginate:
|
||||
"""Tests for cursor-based pagination via cursor_paginate()."""
|
||||
@@ -2520,4 +2815,21 @@ class TestPaginate:
|
||||
db_session,
|
||||
pagination_type="unknown",
|
||||
schema=RoleRead,
|
||||
) # type: ignore[no-matching-overload]
|
||||
) # type: ignore[no-matching-overload] # ty:ignore[no-matching-overload]
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_offset_include_total_false(self, db_session: AsyncSession):
|
||||
"""paginate() passes include_total=False through to offset_paginate."""
|
||||
from fastapi_toolsets.schemas import OffsetPagination
|
||||
|
||||
await RoleCrud.create(db_session, RoleCreate(name="admin"))
|
||||
|
||||
result = await RoleCrud.paginate(
|
||||
db_session,
|
||||
pagination_type=PaginationType.OFFSET,
|
||||
include_total=False,
|
||||
schema=RoleRead,
|
||||
)
|
||||
|
||||
assert isinstance(result.pagination, OffsetPagination)
|
||||
assert result.pagination.total_count is None
|
||||
|
||||
File diff suppressed because it is too large
@@ -68,6 +68,55 @@ class TestCreateDbDependency:
            await conn.run_sync(Base.metadata.drop_all)
        await engine.dispose()

    @pytest.mark.anyio
    async def test_in_transaction_on_yield(self):
        """Session is already in a transaction when the endpoint body starts."""
        engine = create_async_engine(DATABASE_URL, echo=False)
        session_factory = async_sessionmaker(engine, expire_on_commit=False)
        get_db = create_db_dependency(session_factory)

        async for session in get_db():
            assert session.in_transaction()
            break

        await engine.dispose()

    @pytest.mark.anyio
    async def test_update_after_lock_tables_is_persisted(self):
        """Changes made after lock_tables exits (before endpoint returns) are committed.

        Regression: without the auto-begin fix, lock_tables would start and commit a
        real outer transaction, leaving the session idle. Any modifications after that
        point were silently dropped.
        """
        engine = create_async_engine(DATABASE_URL, echo=False)
        session_factory = async_sessionmaker(engine, expire_on_commit=False)

        async with engine.begin() as conn:
            await conn.run_sync(Base.metadata.create_all)

        try:
            get_db = create_db_dependency(session_factory)

            async for session in get_db():
                async with lock_tables(session, [Role]):
                    role = Role(name="lock_then_update")
                    session.add(role)
                    await session.flush()
                # lock_tables has exited — outer transaction must still be open
                assert session.in_transaction()
                role.name = "updated_after_lock"

            async with session_factory() as verify:
                result = await RoleCrud.first(
                    verify, [Role.name == "updated_after_lock"]
                )
                assert result is not None
        finally:
            async with engine.begin() as conn:
                await conn.run_sync(Base.metadata.drop_all)
            await engine.dispose()


class TestCreateDbContext:
    """Tests for create_db_context."""
@@ -363,7 +412,8 @@ class TestCreateDatabase:
            .set(database="test_create_db_general")
            .render_as_string(hide_password=False)
        )
        expected_db: str = make_url(target_url).database  # type: ignore[assignment]
        expected_db = make_url(target_url).database
        assert expected_db is not None

        engine = create_async_engine(DATABASE_URL, isolation_level="AUTOCOMMIT")
        try:

@@ -20,7 +20,7 @@ from .conftest import Role, RoleCreate, RoleCrud, User

async def mock_get_db() -> AsyncGenerator[AsyncSession, None]:
    """Mock session dependency for testing."""
    yield None
    yield None  # type: ignore[misc] # ty:ignore[invalid-yield]


MockSessionDep = Annotated[AsyncSession, Depends(mock_get_db)]

@@ -10,12 +10,13 @@ import datetime
import pytest
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession

from docs_src.examples.pagination_search.db import get_db
from docs_src.examples.pagination_search.models import Article, Base, Category
from docs_src.examples.pagination_search.routes import router
from fastapi_toolsets.exceptions import init_exceptions_handlers
from fastapi_toolsets.pytest import create_db_session

from .conftest import DATABASE_URL

@@ -35,20 +36,8 @@ def build_app(session: AsyncSession) -> FastAPI:
@pytest.fixture(scope="function")
async def ex_db_session():
    """Isolated session for the example models (separate tables from conftest)."""
    engine = create_async_engine(DATABASE_URL, echo=False)
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    session_factory = async_sessionmaker(engine, expire_on_commit=False)
    session = session_factory()

    try:
    async with create_db_session(DATABASE_URL, Base) as session:
        yield session
    finally:
        await session.close()
        async with engine.begin() as conn:
            await conn.run_sync(Base.metadata.drop_all)
        await engine.dispose()


@pytest.fixture
@@ -108,7 +97,7 @@ class TestAppSessionDep:
        gen = get_db()
        session = await gen.__anext__()
        assert isinstance(session, AsyncSession)
        await session.close()
        await gen.aclose()


class TestOffsetPagination:
@@ -193,8 +182,7 @@ class TestOffsetPagination:
        body = resp.json()
        fa = body["filter_attributes"]
        assert set(fa["status"]) == {"draft", "published"}
        # "name" is unique across all facet fields — no prefix needed
        assert set(fa["name"]) == {"backend", "python"}
        assert set(fa["category__name"]) == {"backend", "python"}

    @pytest.mark.anyio
    async def test_filter_attributes_scoped_to_filter(

@@ -1,6 +1,7 @@
|
||||
"""Tests for fastapi_toolsets.fixtures module."""
|
||||
|
||||
import uuid
|
||||
from enum import Enum
|
||||
|
||||
import pytest
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
@@ -13,10 +14,22 @@ from fastapi_toolsets.fixtures import (
|
||||
load_fixtures,
|
||||
load_fixtures_by_context,
|
||||
)
|
||||
from fastapi_toolsets.fixtures.utils import _get_primary_key, _instance_to_dict
|
||||
|
||||
from fastapi_toolsets.fixtures.utils import _get_primary_key
|
||||
from .conftest import IntRole, Permission, Role, RoleCreate, RoleCrud, User, UserCrud
|
||||
|
||||
from .conftest import IntRole, Permission, Role, User
|
||||
|
||||
class AppContext(str, Enum):
|
||||
"""Example user-defined str+Enum context."""
|
||||
|
||||
STAGING = "staging"
|
||||
DEMO = "demo"
|
||||
|
||||
|
||||
class PlainEnumContext(Enum):
|
||||
"""Example user-defined plain Enum context (no str mixin)."""
|
||||
|
||||
STAGING = "staging"
|
||||
|
||||
|
||||
class TestContext:
|
||||
@@ -39,6 +52,86 @@ class TestContext:
|
||||
assert Context.TESTING.value == "testing"
|
||||
|
||||
|
||||
class TestCustomEnumContext:
|
||||
"""Custom Enum types are accepted wherever Context/str are expected."""
|
||||
|
||||
def test_cannot_subclass_context_with_members(self):
|
||||
"""Python prohibits extending an Enum that already has members."""
|
||||
with pytest.raises(TypeError):
|
||||
|
||||
class MyContext(Context): # noqa: F841 # ty: ignore[subclass-of-final-class]
|
||||
STAGING = "staging"
|
||||
|
||||
def test_custom_enum_values_interchangeable_with_context(self):
|
||||
"""A custom enum with the same .value as a built-in Context member is
|
||||
treated as the same context — fixtures registered under one are found
|
||||
by the other."""
|
||||
|
||||
class AppContextFull(str, Enum):
|
||||
BASE = "base"
|
||||
STAGING = "staging"
|
||||
|
||||
registry = FixtureRegistry()
|
||||
|
||||
@registry.register(contexts=[Context.BASE])
|
||||
def roles():
|
||||
return []
|
||||
|
||||
# AppContextFull.BASE has value "base" — same as Context.BASE
|
||||
fixtures = registry.get_by_context(AppContextFull.BASE)
|
||||
assert len(fixtures) == 1
|
||||
|
||||
def test_custom_enum_registry_default_contexts(self):
|
||||
"""FixtureRegistry(contexts=[...]) accepts a custom Enum."""
|
||||
registry = FixtureRegistry(contexts=[AppContext.STAGING])
|
||||
|
||||
@registry.register
|
||||
def data():
|
||||
return []
|
||||
|
||||
fixture = registry.get("data")
|
||||
assert fixture.contexts == ["staging"]
|
||||
|
||||
def test_custom_enum_resolve_context_dependencies(self):
|
||||
"""resolve_context_dependencies accepts a custom Enum context."""
|
||||
registry = FixtureRegistry()
|
||||
|
||||
@registry.register(contexts=[AppContext.STAGING])
|
||||
def staging_roles():
|
||||
return []
|
||||
|
||||
order = registry.resolve_context_dependencies(AppContext.STAGING)
|
||||
assert "staging_roles" in order
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_custom_enum_e2e(self, db_session: AsyncSession):
|
||||
"""End-to-end: register with custom Enum, load with the same Enum."""
|
||||
registry = FixtureRegistry()
|
||||
|
||||
@registry.register(contexts=[AppContext.STAGING])
|
||||
def staging_roles():
|
||||
return [Role(id=uuid.uuid4(), name="staging-admin")]
|
||||
|
||||
result = await load_fixtures_by_context(
|
||||
db_session, registry, AppContext.STAGING
|
||||
)
|
||||
assert len(result["staging_roles"]) == 1
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_plain_enum_e2e(self, db_session: AsyncSession):
|
||||
"""End-to-end: register with plain Enum, load with the same Enum."""
|
||||
registry = FixtureRegistry()
|
||||
|
||||
@registry.register(contexts=[PlainEnumContext.STAGING])
|
||||
def staging_roles():
|
||||
return [Role(id=uuid.uuid4(), name="plain-staging-admin")]
|
||||
|
||||
result = await load_fixtures_by_context(
|
||||
db_session, registry, PlainEnumContext.STAGING
|
||||
)
|
||||
assert len(result["staging_roles"]) == 1
|
||||
|
||||
|
||||
class TestLoadStrategy:
|
||||
"""Tests for LoadStrategy enum."""
|
||||
|
||||
@@ -407,6 +500,37 @@ class TestDependencyResolution:
|
||||
with pytest.raises(ValueError, match="Circular dependency"):
|
||||
registry.resolve_dependencies("a")
|
||||
|
||||
def test_resolve_raises_for_unknown_dependency(self):
|
||||
"""KeyError when depends_on references an unregistered fixture."""
|
||||
registry = FixtureRegistry()
|
||||
|
||||
@registry.register(depends_on=["ghost"])
|
||||
def users():
|
||||
return []
|
||||
|
||||
with pytest.raises(KeyError, match="ghost"):
|
||||
registry.resolve_dependencies("users")
|
||||
|
||||
def test_resolve_deduplicates_shared_depends_on_across_variants(self):
|
||||
"""A dep shared by two same-name variants appears only once in the order."""
|
||||
registry = FixtureRegistry()
|
||||
|
||||
@registry.register(contexts=[Context.BASE])
|
||||
def roles():
|
||||
return []
|
||||
|
||||
@registry.register(depends_on=["roles"], contexts=[Context.BASE])
|
||||
def items():
|
||||
return []
|
||||
|
||||
@registry.register(depends_on=["roles"], contexts=[Context.TESTING])
|
||||
def items(): # noqa: F811
|
||||
return []
|
||||
|
||||
order = registry.resolve_dependencies("items")
|
||||
assert order.count("roles") == 1
|
||||
assert order.index("roles") < order.index("items")
|
||||
|
||||
def test_resolve_context_dependencies(self):
|
||||
"""Resolve all fixtures for a context with dependencies."""
|
||||
registry = FixtureRegistry()
|
||||
@@ -447,8 +571,6 @@ class TestLoadFixtures:
|
||||
assert "roles" in result
|
||||
assert len(result["roles"]) == 2
|
||||
|
||||
from .conftest import RoleCrud
|
||||
|
||||
count = await RoleCrud.count(db_session)
|
||||
assert count == 2
|
||||
|
||||
@@ -479,8 +601,6 @@ class TestLoadFixtures:
|
||||
assert "roles" in result
|
||||
assert "users" in result
|
||||
|
||||
from .conftest import RoleCrud, UserCrud
|
||||
|
||||
assert await RoleCrud.count(db_session) == 1
|
||||
assert await UserCrud.count(db_session) == 1
|
||||
|
||||
@@ -497,11 +617,55 @@ class TestLoadFixtures:
|
||||
await load_fixtures(db_session, registry, "roles", strategy=LoadStrategy.MERGE)
|
||||
await load_fixtures(db_session, registry, "roles", strategy=LoadStrategy.MERGE)
|
||||
|
||||
from .conftest import RoleCrud
|
||||
|
||||
count = await RoleCrud.count(db_session)
|
||||
assert count == 1
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_merge_does_not_overwrite_omitted_nullable_columns(
|
||||
self, db_session: AsyncSession
|
||||
):
|
||||
"""MERGE must not clear nullable columns that the fixture didn't set.
|
||||
|
||||
When a fixture omits a nullable column (e.g. role_id or notes), a re-merge
|
||||
must leave the existing DB value untouched — not overwrite it with NULL.
|
||||
"""
|
||||
registry = FixtureRegistry()
|
||||
admin = await RoleCrud.create(db_session, RoleCreate(name="admin"))
|
||||
uid = uuid.uuid4()
|
||||
|
||||
# First load: user has role_id and notes set
|
||||
@registry.register
|
||||
def users():
|
||||
return [
|
||||
User(
|
||||
id=uid,
|
||||
username="alice",
|
||||
email="a@test.com",
|
||||
role_id=admin.id,
|
||||
notes="original",
|
||||
)
|
||||
]
|
||||
|
||||
await load_fixtures(db_session, registry, "users", strategy=LoadStrategy.MERGE)
|
||||
|
||||
# Second load: fixture omits role_id and notes
|
||||
registry2 = FixtureRegistry()
|
||||
|
||||
@registry2.register
|
||||
def users(): # noqa: F811
|
||||
return [User(id=uid, username="alice-updated", email="a@test.com")]
|
||||
|
||||
await load_fixtures(db_session, registry2, "users", strategy=LoadStrategy.MERGE)
|
||||
|
||||
from sqlalchemy import select
|
||||
|
||||
row = (
|
||||
await db_session.execute(select(User).where(User.id == uid))
|
||||
).scalar_one()
|
||||
assert row.username == "alice-updated" # updated column changed
|
||||
assert row.role_id == admin.id # omitted → preserved
|
||||
assert row.notes == "original" # omitted → preserved
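The assertions above imply that the merge path has to build its ON CONFLICT ... DO UPDATE SET clause from the keys each row actually provided, rather than from a normalized superset of columns. A rough per-row-shape sketch under that assumption (PostgreSQL dialect; merge_rows is illustrative, not the library's implementation):

from itertools import groupby

from sqlalchemy.dialects.postgresql import insert


async def merge_rows(session, model, rows, pk="id"):
    # Group rows by the exact set of keys they provide, so the SET clause
    # never touches a column that a row omitted (which would write NULL).
    rows = sorted(rows, key=lambda r: tuple(sorted(r)))
    for keys, group in groupby(rows, key=lambda r: tuple(sorted(r))):
        stmt = insert(model).values(list(group))
        update_cols = {k: stmt.excluded[k] for k in keys if k != pk}
        if update_cols:
            stmt = stmt.on_conflict_do_update(index_elements=[pk], set_=update_cols)
        else:
            stmt = stmt.on_conflict_do_nothing(index_elements=[pk])
        await session.execute(stmt)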
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_load_with_skip_existing_strategy(self, db_session: AsyncSession):
|
||||
"""Load fixtures with SKIP_EXISTING strategy."""
|
||||
@@ -526,8 +690,6 @@ class TestLoadFixtures:
|
||||
db_session, registry, "roles", strategy=LoadStrategy.SKIP_EXISTING
|
||||
)
|
||||
|
||||
from .conftest import RoleCrud
|
||||
|
||||
role = await RoleCrud.first(db_session, [Role.id == role_id])
|
||||
assert role is not None
|
||||
assert role.name == "original"
|
||||
@@ -553,8 +715,6 @@ class TestLoadFixtures:
|
||||
assert "roles" in result
|
||||
assert len(result["roles"]) == 2
|
||||
|
||||
from .conftest import RoleCrud
|
||||
|
||||
count = await RoleCrud.count(db_session)
|
||||
assert count == 2
|
||||
|
||||
@@ -594,8 +754,6 @@ class TestLoadFixtures:
|
||||
assert "roles" in result
|
||||
assert "other_roles" in result
|
||||
|
||||
from .conftest import RoleCrud
|
||||
|
||||
count = await RoleCrud.count(db_session)
|
||||
assert count == 2
|
||||
|
||||
@@ -660,8 +818,6 @@ class TestLoadFixturesByContext:
|
||||
|
||||
await load_fixtures_by_context(db_session, registry, Context.BASE)
|
||||
|
||||
from .conftest import RoleCrud
|
||||
|
||||
count = await RoleCrud.count(db_session)
|
||||
assert count == 1
|
||||
|
||||
@@ -688,8 +844,6 @@ class TestLoadFixturesByContext:
|
||||
db_session, registry, Context.BASE, Context.TESTING
|
||||
)
|
||||
|
||||
from .conftest import RoleCrud
|
||||
|
||||
count = await RoleCrud.count(db_session)
|
||||
assert count == 2
|
||||
|
||||
@@ -717,8 +871,6 @@ class TestLoadFixturesByContext:
|
||||
|
||||
await load_fixtures_by_context(db_session, registry, Context.TESTING)
|
||||
|
||||
from .conftest import RoleCrud, UserCrud
|
||||
|
||||
assert await RoleCrud.count(db_session) == 1
|
||||
assert await UserCrud.count(db_session) == 1
|
||||
|
||||
@@ -813,3 +965,511 @@ class TestGetPrimaryKey:
|
||||
instance = Permission(subject="post") # action is None
|
||||
pk = _get_primary_key(instance)
|
||||
assert pk is None
|
||||
|
||||
|
||||
class TestRegistryGetVariants:
|
||||
"""Tests for FixtureRegistry.get and get_variants edge cases."""
|
||||
|
||||
def test_get_raises_value_error_for_multi_variant(self):
|
||||
"""get() raises ValueError when the fixture has multiple context variants."""
|
||||
registry = FixtureRegistry()
|
||||
|
||||
@registry.register(contexts=[Context.BASE])
|
||||
def items():
|
||||
return []
|
||||
|
||||
@registry.register(contexts=[Context.TESTING])
|
||||
def items(): # noqa: F811
|
||||
return []
|
||||
|
||||
with pytest.raises(ValueError, match="get_variants"):
|
||||
registry.get("items")
|
||||
|
||||
def test_get_variants_raises_key_error_for_unknown(self):
|
||||
"""get_variants() raises KeyError for an unregistered name."""
|
||||
registry = FixtureRegistry()
|
||||
with pytest.raises(KeyError, match="not found"):
|
||||
registry.get_variants("no_such_fixture")
|
||||
|
||||
|
||||
class TestInstanceToDict:
|
||||
"""Unit tests for the _instance_to_dict helper."""
|
||||
|
||||
def test_explicit_values_included(self):
|
||||
"""All explicitly set column values appear in the result."""
|
||||
role_id = uuid.uuid4()
|
||||
instance = Role(id=role_id, name="admin")
|
||||
d = _instance_to_dict(instance)
|
||||
assert d["id"] == role_id
|
||||
assert d["name"] == "admin"
|
||||
|
||||
def test_callable_default_none_excluded(self):
|
||||
"""A column whose value is None but has a callable Python-side default
|
||||
(e.g. ``default=uuid.uuid4``) is excluded so the DB generates it."""
|
||||
instance = Role(id=None, name="admin")
|
||||
d = _instance_to_dict(instance)
|
||||
assert "id" not in d
|
||||
assert d["name"] == "admin"
|
||||
|
||||
def test_autoincrement_none_excluded(self):
|
||||
"""A column whose value is None but has autoincrement=True is excluded
|
||||
so the DB generates the value via its sequence."""
|
||||
instance = IntRole(id=None, name="admin")
|
||||
d = _instance_to_dict(instance)
|
||||
assert "id" not in d
|
||||
assert d["name"] == "admin"
|
||||
|
||||
def test_nullable_none_included(self):
|
||||
"""None on a nullable column with no default is kept (explicit NULL)."""
|
||||
instance = User(id=uuid.uuid4(), username="u", email="e@e.com", role_id=None)
|
||||
d = _instance_to_dict(instance)
|
||||
assert "role_id" in d
|
||||
assert d["role_id"] is None
|
||||
|
||||
def test_nullable_str_no_default_omitted_not_in_dict(self):
|
||||
"""Mapped[str | None] with no default, not provided in constructor, is absent from dict."""
|
||||
instance = User(id=uuid.uuid4(), username="u", email="e@e.com")
|
||||
d = _instance_to_dict(instance)
|
||||
assert "notes" not in d
|
||||
|
||||
def test_nullable_str_no_default_explicit_none_included(self):
|
||||
"""Mapped[str | None] with no default, explicitly set to None, is included as NULL."""
|
||||
instance = User(id=uuid.uuid4(), username="u", email="e@e.com", notes=None)
|
||||
d = _instance_to_dict(instance)
|
||||
assert "notes" in d
|
||||
assert d["notes"] is None
|
||||
|
||||
def test_nullable_str_no_default_with_value_included(self):
|
||||
"""Mapped[str | None] with no default and a value set is included normally."""
|
||||
instance = User(id=uuid.uuid4(), username="u", email="e@e.com", notes="hello")
|
||||
d = _instance_to_dict(instance)
|
||||
assert d["notes"] == "hello"
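These three cases pin down the distinction the helper has to make: an attribute that was never set versus one explicitly set to None. A sketch of how that can be told apart with SQLAlchemy's inspection API (illustrative only; the real _instance_to_dict may be implemented differently):

from sqlalchemy import inspect


def instance_to_dict(instance) -> dict:
    state = inspect(instance)
    result = {}
    for attr in state.mapper.column_attrs:
        col = attr.columns[0]
        # Attributes never assigned in the constructor have no entry in the
        # instance state and stay out of the dict entirely.
        if attr.key not in state.dict:
            continue
        value = state.dict[attr.key]
        # None combined with a callable Python-side default (uuid.uuid4) or an
        # autoincrementing PK is dropped so the database generates the value.
        if value is None and (
            (col.default is not None and col.default.is_callable)
            or (col.primary_key and col.autoincrement in (True, "auto"))
        ):
            continue
        result[attr.key] = value
    return result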
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_nullable_str_no_default_insert_roundtrip(
|
||||
self, db_session: AsyncSession
|
||||
):
|
||||
"""Fixture loading works for models with Mapped[str | None] (no default).
|
||||
|
||||
Both the omitted-value (→ NULL) and explicit-None paths must insert without error.
|
||||
"""
|
||||
registry = FixtureRegistry()
|
||||
|
||||
uid_a = uuid.uuid4()
|
||||
uid_b = uuid.uuid4()
|
||||
uid_c = uuid.uuid4()
|
||||
|
||||
@registry.register
|
||||
def users():
|
||||
return [
|
||||
User(
|
||||
id=uid_a, username="no_notes", email="a@test.com"
|
||||
), # notes omitted
|
||||
User(
|
||||
id=uid_b, username="null_notes", email="b@test.com", notes=None
|
||||
), # explicit None
|
||||
User(
|
||||
id=uid_c, username="has_notes", email="c@test.com", notes="hi"
|
||||
), # value set
|
||||
]
|
||||
|
||||
result = await load_fixtures(db_session, registry, "users")
|
||||
|
||||
from sqlalchemy import select
|
||||
|
||||
rows = (
|
||||
(await db_session.execute(select(User).order_by(User.username)))
|
||||
.scalars()
|
||||
.all()
|
||||
)
|
||||
by_username = {r.username: r for r in rows}
|
||||
|
||||
assert by_username["no_notes"].notes is None
|
||||
assert by_username["null_notes"].notes is None
|
||||
assert by_username["has_notes"].notes == "hi"
|
||||
assert len(result["users"]) == 3
|
||||
|
||||
|
||||
class TestBatchMergeNonPkColumns:
|
||||
"""Batch MERGE on a model with no non-PK columns (PK-only table)."""
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_merge_pk_only_model(self, db_session: AsyncSession):
|
||||
"""MERGE strategy on a PK-only model uses on_conflict_do_nothing."""
|
||||
registry = FixtureRegistry()
|
||||
|
||||
@registry.register
|
||||
def permissions():
|
||||
return [
|
||||
Permission(subject="post", action="read"),
|
||||
Permission(subject="post", action="write"),
|
||||
]
|
||||
|
||||
result = await load_fixtures(
|
||||
db_session, registry, "permissions", strategy=LoadStrategy.MERGE
|
||||
)
|
||||
assert len(result["permissions"]) == 2
|
||||
|
||||
# Run again — conflicts are silently ignored.
|
||||
result2 = await load_fixtures(
|
||||
db_session, registry, "permissions", strategy=LoadStrategy.MERGE
|
||||
)
|
||||
assert len(result2["permissions"]) == 2
|
||||
|
||||
|
||||
class TestBatchNullableColumnEdgeCases:
|
||||
"""Deep tests for nullable column handling during batch import."""
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_insert_batch_mixed_nullable_fk(self, db_session: AsyncSession):
|
||||
"""INSERT batch where some rows set a nullable FK and others don't.
|
||||
|
||||
After normalization the omitted role_id becomes None. For INSERT this
|
||||
is acceptable — both rows should insert successfully with the correct
|
||||
values (one with FK, one with NULL).
|
||||
"""
|
||||
registry = FixtureRegistry()
|
||||
admin = await RoleCrud.create(db_session, RoleCreate(name="admin"))
|
||||
uid1 = uuid.uuid4()
|
||||
uid2 = uuid.uuid4()
|
||||
|
||||
@registry.register
|
||||
def users():
|
||||
return [
|
||||
User(
|
||||
id=uid1, username="with_role", email="a@test.com", role_id=admin.id
|
||||
),
|
||||
User(id=uid2, username="no_role", email="b@test.com"),
|
||||
]
|
||||
|
||||
await load_fixtures(db_session, registry, "users", strategy=LoadStrategy.INSERT)
|
||||
|
||||
from sqlalchemy import select
|
||||
|
||||
rows = {
|
||||
r.username: r
|
||||
for r in (await db_session.execute(select(User))).scalars().all()
|
||||
}
|
||||
assert rows["with_role"].role_id == admin.id
|
||||
assert rows["no_role"].role_id is None
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_insert_batch_mixed_nullable_notes(self, db_session: AsyncSession):
|
||||
"""INSERT batch where some rows have notes and others don't.
|
||||
|
||||
Ensures normalization doesn't break the insert and that each row gets
|
||||
the intended value.
|
||||
"""
|
||||
registry = FixtureRegistry()
|
||||
uid1 = uuid.uuid4()
|
||||
uid2 = uuid.uuid4()
|
||||
uid3 = uuid.uuid4()
|
||||
|
||||
@registry.register
|
||||
def users():
|
||||
return [
|
||||
User(
|
||||
id=uid1,
|
||||
username="has_notes",
|
||||
email="a@test.com",
|
||||
notes="important",
|
||||
),
|
||||
User(id=uid2, username="no_notes", email="b@test.com"),
|
||||
User(id=uid3, username="null_notes", email="c@test.com", notes=None),
|
||||
]
|
||||
|
||||
await load_fixtures(db_session, registry, "users", strategy=LoadStrategy.INSERT)
|
||||
|
||||
from sqlalchemy import select
|
||||
|
||||
rows = {
|
||||
r.username: r
|
||||
for r in (await db_session.execute(select(User))).scalars().all()
|
||||
}
|
||||
assert rows["has_notes"].notes == "important"
|
||||
assert rows["no_notes"].notes is None
|
||||
assert rows["null_notes"].notes is None
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_merge_batch_mixed_nullable_does_not_overwrite(
|
||||
self, db_session: AsyncSession
|
||||
):
|
||||
"""MERGE batch where one row sets a nullable column and another omits it.
|
||||
|
||||
If both rows already exist in DB, the row that omits the column must
|
||||
NOT have its existing value overwritten with NULL.
|
||||
|
||||
This is the core normalization bug: _normalize_rows fills missing keys
|
||||
with None, and then MERGE's SET clause includes that column for ALL rows.
|
||||
"""
|
||||
from sqlalchemy import select
|
||||
|
||||
admin = await RoleCrud.create(db_session, RoleCreate(name="admin"))
|
||||
uid1 = uuid.uuid4()
|
||||
uid2 = uuid.uuid4()
|
||||
|
||||
# Pre-populate: both users have role_id and notes
|
||||
registry_initial = FixtureRegistry()
|
||||
|
||||
@registry_initial.register
|
||||
def users():
|
||||
return [
|
||||
User(
|
||||
id=uid1,
|
||||
username="alice",
|
||||
email="a@test.com",
|
||||
role_id=admin.id,
|
||||
notes="alice notes",
|
||||
),
|
||||
User(
|
||||
id=uid2,
|
||||
username="bob",
|
||||
email="b@test.com",
|
||||
role_id=admin.id,
|
||||
notes="bob notes",
|
||||
),
|
||||
]
|
||||
|
||||
await load_fixtures(
|
||||
db_session, registry_initial, "users", strategy=LoadStrategy.INSERT
|
||||
)
|
||||
|
||||
# Re-merge: alice updates notes, bob omits notes entirely
|
||||
registry_merge = FixtureRegistry()
|
||||
|
||||
@registry_merge.register
|
||||
def users(): # noqa: F811
|
||||
return [
|
||||
User(
|
||||
id=uid1,
|
||||
username="alice",
|
||||
email="a@test.com",
|
||||
role_id=admin.id,
|
||||
notes="updated",
|
||||
),
|
||||
User(
|
||||
id=uid2,
|
||||
username="bob",
|
||||
email="b@test.com",
|
||||
role_id=admin.id,
|
||||
), # notes omitted
|
||||
]
|
||||
|
||||
await load_fixtures(
|
||||
db_session, registry_merge, "users", strategy=LoadStrategy.MERGE
|
||||
)
|
||||
|
||||
rows = {
|
||||
r.username: r
|
||||
for r in (await db_session.execute(select(User))).scalars().all()
|
||||
}
|
||||
assert rows["alice"].notes == "updated"
|
||||
# Bob's notes must be preserved, NOT overwritten with NULL
|
||||
assert rows["bob"].notes == "bob notes"
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_merge_batch_mixed_nullable_fk_preserves_existing(
|
||||
self, db_session: AsyncSession
|
||||
):
|
||||
"""MERGE batch where one row sets role_id and another omits it.
|
||||
|
||||
The row that omits role_id must keep its existing DB value.
|
||||
"""
|
||||
from sqlalchemy import select
|
||||
|
||||
admin = await RoleCrud.create(db_session, RoleCreate(name="admin"))
|
||||
editor = await RoleCrud.create(db_session, RoleCreate(name="editor"))
|
||||
uid1 = uuid.uuid4()
|
||||
uid2 = uuid.uuid4()
|
||||
|
||||
# Pre-populate
|
||||
registry_initial = FixtureRegistry()
|
||||
|
||||
@registry_initial.register
|
||||
def users():
|
||||
return [
|
||||
User(
|
||||
id=uid1,
|
||||
username="alice",
|
||||
email="a@test.com",
|
||||
role_id=admin.id,
|
||||
),
|
||||
User(
|
||||
id=uid2,
|
||||
username="bob",
|
||||
email="b@test.com",
|
||||
role_id=editor.id,
|
||||
),
|
||||
]
|
||||
|
||||
await load_fixtures(
|
||||
db_session, registry_initial, "users", strategy=LoadStrategy.INSERT
|
||||
)
|
||||
|
||||
# Re-merge: alice changes role, bob omits role_id
|
||||
registry_merge = FixtureRegistry()
|
||||
|
||||
@registry_merge.register
|
||||
def users(): # noqa: F811
|
||||
return [
|
||||
User(
|
||||
id=uid1,
|
||||
username="alice",
|
||||
email="a@test.com",
|
||||
role_id=editor.id,
|
||||
),
|
||||
User(id=uid2, username="bob", email="b@test.com"), # role_id omitted
|
||||
]
|
||||
|
||||
await load_fixtures(
|
||||
db_session, registry_merge, "users", strategy=LoadStrategy.MERGE
|
||||
)
|
||||
|
||||
rows = {
|
||||
r.username: r
|
||||
for r in (await db_session.execute(select(User))).scalars().all()
|
||||
}
|
||||
assert rows["alice"].role_id == editor.id # updated
|
||||
assert rows["bob"].role_id == editor.id # must be preserved, NOT NULL
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_insert_batch_mixed_pk_presence(self, db_session: AsyncSession):
|
||||
"""INSERT batch where some rows have explicit PK and others rely on
|
||||
the callable default (uuid.uuid4).
|
||||
|
||||
Normalization adds the PK key with None to rows that omitted it,
|
||||
which can cause NOT NULL violations on the PK column.
|
||||
"""
|
||||
registry = FixtureRegistry()
|
||||
explicit_id = uuid.uuid4()
|
||||
|
||||
@registry.register
|
||||
def roles():
|
||||
return [
|
||||
Role(id=explicit_id, name="admin"),
|
||||
Role(name="user"), # PK omitted, relies on default=uuid.uuid4
|
||||
]
|
||||
|
||||
await load_fixtures(db_session, registry, "roles", strategy=LoadStrategy.INSERT)
|
||||
|
||||
from sqlalchemy import select
|
||||
|
||||
rows = (await db_session.execute(select(Role))).scalars().all()
|
||||
assert len(rows) == 2
|
||||
names = {r.name for r in rows}
|
||||
assert names == {"admin", "user"}
|
||||
# The "admin" row must have the explicit ID
|
||||
admin = next(r for r in rows if r.name == "admin")
|
||||
assert admin.id == explicit_id
|
||||
# The "user" row must have a generated UUID (not None)
|
||||
user = next(r for r in rows if r.name == "user")
|
||||
assert user.id is not None
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_skip_existing_batch_mixed_nullable(self, db_session: AsyncSession):
|
||||
"""SKIP_EXISTING with mixed nullable columns inserts correctly.
|
||||
|
||||
Only new rows are inserted; existing rows are untouched regardless of
|
||||
which columns the fixture provides.
|
||||
"""
|
||||
from sqlalchemy import select
|
||||
|
||||
admin = await RoleCrud.create(db_session, RoleCreate(name="admin"))
|
||||
uid1 = uuid.uuid4()
|
||||
uid2 = uuid.uuid4()
|
||||
|
||||
# Pre-populate uid1 with notes
|
||||
registry_initial = FixtureRegistry()
|
||||
|
||||
@registry_initial.register
|
||||
def users():
|
||||
return [
|
||||
User(
|
||||
id=uid1,
|
||||
username="alice",
|
||||
email="a@test.com",
|
||||
role_id=admin.id,
|
||||
notes="keep me",
|
||||
),
|
||||
]
|
||||
|
||||
await load_fixtures(
|
||||
db_session, registry_initial, "users", strategy=LoadStrategy.INSERT
|
||||
)
|
||||
|
||||
# Load again with SKIP_EXISTING: uid1 already exists, uid2 is new
|
||||
registry_skip = FixtureRegistry()
|
||||
|
||||
@registry_skip.register
|
||||
def users(): # noqa: F811
|
||||
return [
|
||||
User(id=uid1, username="alice-updated", email="a@test.com"), # exists
|
||||
User(
|
||||
id=uid2,
|
||||
username="bob",
|
||||
email="b@test.com",
|
||||
notes="new user",
|
||||
), # new
|
||||
]
|
||||
|
||||
result = await load_fixtures(
|
||||
db_session, registry_skip, "users", strategy=LoadStrategy.SKIP_EXISTING
|
||||
)
|
||||
assert len(result["users"]) == 1 # only bob inserted
|
||||
|
||||
rows = {
|
||||
r.username: r
|
||||
for r in (await db_session.execute(select(User))).scalars().all()
|
||||
}
|
||||
# alice untouched
|
||||
assert rows["alice"].role_id == admin.id
|
||||
assert rows["alice"].notes == "keep me"
|
||||
# bob inserted correctly
|
||||
assert rows["bob"].notes == "new user"
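SKIP_EXISTING as exercised here inserts only the rows whose primary key is not already present and reports back just those. A minimal sketch of that behaviour with PostgreSQL's ON CONFLICT DO NOTHING and RETURNING (skip_existing is a hypothetical name, and keying on the primary key is an assumption):

from sqlalchemy.dialects.postgresql import insert


async def skip_existing(session, model, rows, pk="id"):
    stmt = (
        insert(model)
        .values(rows)
        .on_conflict_do_nothing(index_elements=[pk])
        .returning(model)
    )
    result = await session.execute(stmt)
    # Only rows that were actually inserted come back; conflicting rows are
    # skipped and their existing values are left untouched.
    return result.scalars().all()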
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_insert_batch_every_row_different_nullable_columns(
|
||||
self, db_session: AsyncSession
|
||||
):
|
||||
"""Each row in the batch sets a different combination of nullable columns.
|
||||
|
||||
Tests that normalization produces valid SQL for all rows.
|
||||
"""
|
||||
registry = FixtureRegistry()
|
||||
admin = await RoleCrud.create(db_session, RoleCreate(name="admin"))
|
||||
uid1 = uuid.uuid4()
|
||||
uid2 = uuid.uuid4()
|
||||
uid3 = uuid.uuid4()
|
||||
|
||||
@registry.register
|
||||
def users():
|
||||
return [
|
||||
User(
|
||||
id=uid1,
|
||||
username="all_set",
|
||||
email="a@test.com",
|
||||
role_id=admin.id,
|
||||
notes="full",
|
||||
),
|
||||
User(
|
||||
id=uid2, username="only_role", email="b@test.com", role_id=admin.id
|
||||
),
|
||||
User(
|
||||
id=uid3, username="only_notes", email="c@test.com", notes="partial"
|
||||
),
|
||||
]
|
||||
|
||||
await load_fixtures(db_session, registry, "users", strategy=LoadStrategy.INSERT)
|
||||
|
||||
from sqlalchemy import select
|
||||
|
||||
rows = {
|
||||
r.username: r
|
||||
for r in (await db_session.execute(select(User))).scalars().all()
|
||||
}
|
||||
assert rows["all_set"].role_id == admin.id
|
||||
assert rows["all_set"].notes == "full"
|
||||
assert rows["only_role"].role_id == admin.id
|
||||
assert rows["only_role"].notes is None
|
||||
assert rows["only_notes"].role_id is None
|
||||
assert rows["only_notes"].notes == "partial"
|
||||
|
||||
@@ -101,7 +101,7 @@ class TestMetricsImportGuard:
|
||||
with patch("builtins.__import__", side_effect=blocking_import):
|
||||
mod = importlib.import_module("fastapi_toolsets.metrics")
|
||||
with pytest.raises(ImportError, match="prometheus_client"):
|
||||
mod.init_metrics(None, None) # type: ignore[arg-type]
|
||||
mod.init_metrics(None, None) # type: ignore[arg-type] # ty:ignore[invalid-argument-type]
|
||||
finally:
|
||||
for key in list(sys.modules):
|
||||
if key.startswith("fastapi_toolsets.metrics"):
|
||||
@@ -171,8 +171,15 @@ class TestPytestImportGuard:
|
||||
class TestCliImportGuard:
|
||||
"""Tests for CLI module import guard when typer is missing."""
|
||||
|
||||
def test_import_raises_without_typer(self):
|
||||
"""Importing cli.app raises when typer is missing."""
|
||||
@pytest.mark.parametrize(
|
||||
"expected_match",
|
||||
[
|
||||
"typer",
|
||||
r"pip install fastapi-toolsets\[cli\]",
|
||||
],
|
||||
)
|
||||
def test_import_raises_without_typer(self, expected_match):
|
||||
"""Importing cli.app raises when typer is missing, with an informative error message."""
|
||||
saved, blocking_import = _reload_without_package(
|
||||
"fastapi_toolsets.cli.app", ["typer"]
|
||||
)
|
||||
@@ -186,33 +193,7 @@ class TestCliImportGuard:
|
||||
|
||||
try:
|
||||
with patch("builtins.__import__", side_effect=blocking_import):
|
||||
with pytest.raises(ImportError, match="typer"):
|
||||
importlib.import_module("fastapi_toolsets.cli.app")
|
||||
finally:
|
||||
for key in list(sys.modules):
|
||||
if key.startswith("fastapi_toolsets.cli.app") or key.startswith(
|
||||
"fastapi_toolsets.cli.config"
|
||||
):
|
||||
sys.modules.pop(key, None)
|
||||
sys.modules.update(saved)
|
||||
|
||||
def test_error_message_suggests_cli_extra(self):
|
||||
"""Error message suggests installing the cli extra."""
|
||||
saved, blocking_import = _reload_without_package(
|
||||
"fastapi_toolsets.cli.app", ["typer"]
|
||||
)
|
||||
config_keys = [
|
||||
k for k in sys.modules if k.startswith("fastapi_toolsets.cli.config")
|
||||
]
|
||||
for key in config_keys:
|
||||
if key not in saved:
|
||||
saved[key] = sys.modules.pop(key)
|
||||
|
||||
try:
|
||||
with patch("builtins.__import__", side_effect=blocking_import):
|
||||
with pytest.raises(
|
||||
ImportError, match=r"pip install fastapi-toolsets\[cli\]"
|
||||
):
|
||||
with pytest.raises(ImportError, match=expected_match):
|
||||
importlib.import_module("fastapi_toolsets.cli.app")
|
||||
finally:
|
||||
for key in list(sys.modules):
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
"""Tests for fastapi_toolsets.metrics module."""
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
from unittest.mock import AsyncMock, MagicMock
|
||||
|
||||
@@ -287,6 +286,16 @@ class TestIncludeRegistry:
|
||||
class TestInitMetrics:
|
||||
"""Tests for init_metrics function."""
|
||||
|
||||
@pytest.fixture
|
||||
def metrics_client(self):
|
||||
"""Create a FastAPI app with MetricsRegistry and return a TestClient."""
|
||||
app = FastAPI()
|
||||
registry = MetricsRegistry()
|
||||
init_metrics(app, registry)
|
||||
client = TestClient(app)
|
||||
yield client
|
||||
client.close()
|
||||
|
||||
def test_returns_app(self):
|
||||
"""Returns the FastAPI app."""
|
||||
app = FastAPI()
|
||||
@@ -294,26 +303,14 @@ class TestInitMetrics:
|
||||
result = init_metrics(app, registry)
|
||||
assert result is app
|
||||
|
||||
def test_metrics_endpoint_responds(self):
|
||||
def test_metrics_endpoint_responds(self, metrics_client):
|
||||
"""The /metrics endpoint returns 200."""
|
||||
app = FastAPI()
|
||||
registry = MetricsRegistry()
|
||||
init_metrics(app, registry)
|
||||
|
||||
client = TestClient(app)
|
||||
response = client.get("/metrics")
|
||||
|
||||
response = metrics_client.get("/metrics")
|
||||
assert response.status_code == 200
|
||||
|
||||
def test_metrics_endpoint_content_type(self):
|
||||
def test_metrics_endpoint_content_type(self, metrics_client):
|
||||
"""The /metrics endpoint returns prometheus content type."""
|
||||
app = FastAPI()
|
||||
registry = MetricsRegistry()
|
||||
init_metrics(app, registry)
|
||||
|
||||
client = TestClient(app)
|
||||
response = client.get("/metrics")
|
||||
|
||||
response = metrics_client.get("/metrics")
|
||||
assert "text/plain" in response.headers["content-type"]
|
||||
|
||||
def test_custom_path(self):
|
||||
@@ -445,36 +442,33 @@ class TestInitMetrics:
|
||||
class TestMultiProcessMode:
|
||||
"""Tests for multi-process Prometheus mode."""
|
||||
|
||||
def test_multiprocess_with_env_var(self):
|
||||
def test_multiprocess_with_env_var(self, monkeypatch):
|
||||
"""Multi-process mode works when PROMETHEUS_MULTIPROC_DIR is set."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
os.environ["PROMETHEUS_MULTIPROC_DIR"] = tmpdir
|
||||
try:
|
||||
# Use a separate registry to avoid conflicts with default
|
||||
prom_registry = CollectorRegistry()
|
||||
app = FastAPI()
|
||||
registry = MetricsRegistry()
|
||||
monkeypatch.setenv("PROMETHEUS_MULTIPROC_DIR", tmpdir)
|
||||
# Use a separate registry to avoid conflicts with default
|
||||
prom_registry = CollectorRegistry()
|
||||
app = FastAPI()
|
||||
registry = MetricsRegistry()
|
||||
|
||||
@registry.register
|
||||
def mp_counter():
|
||||
return Counter(
|
||||
"mp_test_counter",
|
||||
"A multiprocess counter",
|
||||
registry=prom_registry,
|
||||
)
|
||||
@registry.register
|
||||
def mp_counter():
|
||||
return Counter(
|
||||
"mp_test_counter",
|
||||
"A multiprocess counter",
|
||||
registry=prom_registry,
|
||||
)
|
||||
|
||||
init_metrics(app, registry)
|
||||
init_metrics(app, registry)
|
||||
|
||||
client = TestClient(app)
|
||||
response = client.get("/metrics")
|
||||
client = TestClient(app)
|
||||
response = client.get("/metrics")
|
||||
|
||||
assert response.status_code == 200
|
||||
finally:
|
||||
del os.environ["PROMETHEUS_MULTIPROC_DIR"]
|
||||
assert response.status_code == 200
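The rewritten test only asserts that /metrics responds while PROMETHEUS_MULTIPROC_DIR is set. For context, the standard prometheus_client pattern an endpoint has to follow in multi-process mode looks roughly like this (a generic upstream sketch, not fastapi-toolsets internals):

import os

from prometheus_client import CollectorRegistry, generate_latest, multiprocess


def render_metrics() -> bytes:
    if "PROMETHEUS_MULTIPROC_DIR" in os.environ:
        # Aggregate the per-process metric files written by worker processes.
        registry = CollectorRegistry()
        multiprocess.MultiProcessCollector(registry)
        return generate_latest(registry)
    # Single-process mode: fall back to the default global registry.
    return generate_latest()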
|
||||
|
||||
def test_single_process_without_env_var(self):
|
||||
def test_single_process_without_env_var(self, monkeypatch):
|
||||
"""Single-process mode when PROMETHEUS_MULTIPROC_DIR is not set."""
|
||||
os.environ.pop("PROMETHEUS_MULTIPROC_DIR", None)
|
||||
monkeypatch.delenv("PROMETHEUS_MULTIPROC_DIR", raising=False)
|
||||
|
||||
app = FastAPI()
|
||||
registry = MetricsRegistry()
|
||||
|
||||
tests/test_models.py (1435 lines changed): file diff suppressed because it is too large.
@@ -7,9 +7,10 @@ from fastapi import Depends, FastAPI
|
||||
from httpx import AsyncClient
|
||||
from sqlalchemy import select, text
|
||||
from sqlalchemy.engine import make_url
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from fastapi_toolsets.db import get_transaction
|
||||
from fastapi_toolsets.fixtures import Context, FixtureRegistry
|
||||
from fastapi_toolsets.pytest import (
|
||||
create_async_client,
|
||||
@@ -336,6 +337,42 @@ class TestCreateDbSession:
|
||||
result = await session.execute(select(Role))
|
||||
assert result.all() == []
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_get_transaction_commits_visible_to_separate_session(self):
|
||||
"""Data written via get_transaction() is committed and visible to other sessions."""
|
||||
role_id = uuid.uuid4()
|
||||
|
||||
async with create_db_session(DATABASE_URL, Base, drop_tables=False) as session:
|
||||
# Simulate what _create_fixture_function does: insert via get_transaction
|
||||
# with no explicit commit afterward.
|
||||
async with get_transaction(session):
|
||||
role = Role(id=role_id, name="visible_to_other_session")
|
||||
session.add(role)
|
||||
|
||||
# The data must have been committed (begin/commit, not a savepoint),
|
||||
# so a separate engine/session can read it.
|
||||
other_engine = create_async_engine(DATABASE_URL, echo=False)
|
||||
try:
|
||||
other_session_maker = async_sessionmaker(
|
||||
other_engine, expire_on_commit=False
|
||||
)
|
||||
async with other_session_maker() as other:
|
||||
result = await other.execute(select(Role).where(Role.id == role_id))
|
||||
fetched = result.scalar_one_or_none()
|
||||
assert fetched is not None, (
|
||||
"Fixture data inserted via get_transaction() must be committed "
|
||||
"and visible to a separate session. If create_db_session uses "
|
||||
"create_db_context, auto-begin forces get_transaction() into "
|
||||
"savepoints instead of real commits."
|
||||
)
|
||||
assert fetched.name == "visible_to_other_session"
|
||||
finally:
|
||||
await other_engine.dispose()
|
||||
|
||||
# Cleanup
|
||||
async with create_db_session(DATABASE_URL, Base, drop_tables=True) as _:
|
||||
pass
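The assertion message spells out the failure mode this test guards against: if the session is already inside an explicit BEGIN, a transaction helper can only open a SAVEPOINT, and nothing is durably committed for other sessions to see. A generic sketch of the distinction (not the actual get_transaction implementation):

from contextlib import asynccontextmanager

from sqlalchemy.ext.asyncio import AsyncSession


@asynccontextmanager
async def transaction(session: AsyncSession):
    if session.in_transaction():
        # Already inside BEGIN: only a nested SAVEPOINT is possible, so the
        # outer transaction still decides whether anything is committed.
        async with session.begin_nested():
            yield session
    else:
        # No transaction yet: a real BEGIN/COMMIT, visible to other sessions.
        async with session.begin():
            yield session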
|
||||
|
||||
|
||||
class TestGetXdistWorker:
|
||||
"""Tests for _get_xdist_worker helper."""
|
||||
|
||||
@@ -201,6 +201,88 @@ class TestOffsetPagination:
|
||||
assert data["page"] == 2
|
||||
assert data["has_more"] is True
|
||||
|
||||
def test_total_count_can_be_none(self):
|
||||
"""total_count accepts None (include_total=False mode)."""
|
||||
pagination = OffsetPagination(
|
||||
total_count=None,
|
||||
items_per_page=20,
|
||||
page=1,
|
||||
has_more=True,
|
||||
)
|
||||
assert pagination.total_count is None
|
||||
|
||||
def test_serialization_with_none_total_count(self):
|
||||
"""OffsetPagination serializes total_count=None correctly."""
|
||||
pagination = OffsetPagination(
|
||||
total_count=None,
|
||||
items_per_page=20,
|
||||
page=1,
|
||||
has_more=False,
|
||||
)
|
||||
data = pagination.model_dump()
|
||||
assert data["total_count"] is None
|
||||
|
||||
def test_pages_computed(self):
|
||||
"""pages is ceil(total_count / items_per_page)."""
|
||||
pagination = OffsetPagination(
|
||||
total_count=42,
|
||||
items_per_page=10,
|
||||
page=1,
|
||||
has_more=True,
|
||||
)
|
||||
assert pagination.pages == 5
|
||||
|
||||
def test_pages_exact_division(self):
|
||||
"""pages is exact when total_count is evenly divisible."""
|
||||
pagination = OffsetPagination(
|
||||
total_count=40,
|
||||
items_per_page=10,
|
||||
page=1,
|
||||
has_more=False,
|
||||
)
|
||||
assert pagination.pages == 4
|
||||
|
||||
def test_pages_zero_total(self):
|
||||
"""pages is 0 when total_count is 0."""
|
||||
pagination = OffsetPagination(
|
||||
total_count=0,
|
||||
items_per_page=10,
|
||||
page=1,
|
||||
has_more=False,
|
||||
)
|
||||
assert pagination.pages == 0
|
||||
|
||||
def test_pages_zero_items_per_page(self):
|
||||
"""pages is 0 when items_per_page is 0."""
|
||||
pagination = OffsetPagination(
|
||||
total_count=100,
|
||||
items_per_page=0,
|
||||
page=1,
|
||||
has_more=False,
|
||||
)
|
||||
assert pagination.pages == 0
|
||||
|
||||
def test_pages_none_when_total_count_none(self):
|
||||
"""pages is None when total_count is None (include_total=False)."""
|
||||
pagination = OffsetPagination(
|
||||
total_count=None,
|
||||
items_per_page=20,
|
||||
page=1,
|
||||
has_more=True,
|
||||
)
|
||||
assert pagination.pages is None
|
||||
|
||||
def test_pages_in_serialization(self):
|
||||
"""pages appears in model_dump output."""
|
||||
pagination = OffsetPagination(
|
||||
total_count=25,
|
||||
items_per_page=10,
|
||||
page=1,
|
||||
has_more=True,
|
||||
)
|
||||
data = pagination.model_dump()
|
||||
assert data["pages"] == 3
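Taken together, these cases fix the rule for pages: ceil(total_count / items_per_page), degrading to 0 when either operand is 0 and to None when the total is unknown. A minimal Pydantic sketch of that rule (field names taken from the tests; expressing it as a computed field is an assumption about OffsetPagination):

import math

from pydantic import BaseModel, computed_field


class OffsetPaginationSketch(BaseModel):
    total_count: int | None
    items_per_page: int
    page: int
    has_more: bool

    @computed_field
    @property
    def pages(self) -> int | None:
        if self.total_count is None:
            return None  # include_total=False: total is unknown
        if self.items_per_page == 0 or self.total_count == 0:
            return 0
        return math.ceil(self.total_count / self.items_per_page)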
|
||||
|
||||
|
||||
class TestCursorPagination:
|
||||
"""Tests for CursorPagination schema."""
|
||||
@@ -469,7 +551,7 @@ class TestOffsetPaginatedResponse:
|
||||
pagination=OffsetPagination(
|
||||
total_count=0, items_per_page=10, page=1, has_more=False
|
||||
),
|
||||
pagination_type=PaginationType.CURSOR, # type: ignore[arg-type]
|
||||
pagination_type=PaginationType.CURSOR, # type: ignore[arg-type] # ty:ignore[invalid-argument-type]
|
||||
)
|
||||
|
||||
def test_filter_attributes_defaults_to_none(self):
|
||||
@@ -556,7 +638,7 @@ class TestCursorPaginatedResponse:
|
||||
pagination=CursorPagination(
|
||||
next_cursor=None, items_per_page=10, has_more=False
|
||||
),
|
||||
pagination_type=PaginationType.OFFSET, # type: ignore[arg-type]
|
||||
pagination_type=PaginationType.OFFSET, # type: ignore[arg-type] # ty:ignore[invalid-argument-type]
|
||||
)
|
||||
|
||||
def test_full_serialization(self):
|
||||
|
||||
File diff suppressed because it is too large.

uv.lock (generated, 268 lines changed):
@@ -81,76 +81,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/d7/8fb3044eaef08a310acfe23dae9a8e2e07d305edc29a53497e52bc76eca7/asyncpg-0.31.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bd4107bb7cdd0e9e65fae66a62afd3a249663b844fa34d479f6d5b3bef9c04c3", size = 706062, upload-time = "2025-11-24T23:26:44.086Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bcrypt"
|
||||
version = "5.0.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d4/36/3329e2518d70ad8e2e5817d5a4cac6bba05a47767ec416c7d020a965f408/bcrypt-5.0.0.tar.gz", hash = "sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd", size = 25386, upload-time = "2025-09-25T19:50:47.829Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/13/85/3e65e01985fddf25b64ca67275bb5bdb4040bd1a53b66d355c6c37c8a680/bcrypt-5.0.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f3c08197f3039bec79cee59a606d62b96b16669cff3949f21e74796b6e3cd2be", size = 481806, upload-time = "2025-09-25T19:49:05.102Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/dc/01eb79f12b177017a726cbf78330eb0eb442fae0e7b3dfd84ea2849552f3/bcrypt-5.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:200af71bc25f22006f4069060c88ed36f8aa4ff7f53e67ff04d2ab3f1e79a5b2", size = 268626, upload-time = "2025-09-25T19:49:06.723Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/cf/e82388ad5959c40d6afd94fb4743cc077129d45b952d46bdc3180310e2df/bcrypt-5.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:baade0a5657654c2984468efb7d6c110db87ea63ef5a4b54732e7e337253e44f", size = 271853, upload-time = "2025-09-25T19:49:08.028Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/86/7134b9dae7cf0efa85671651341f6afa695857fae172615e960fb6a466fa/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c58b56cdfb03202b3bcc9fd8daee8e8e9b6d7e3163aa97c631dfcfcc24d36c86", size = 269793, upload-time = "2025-09-25T19:49:09.727Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/82/6296688ac1b9e503d034e7d0614d56e80c5d1a08402ff856a4549cb59207/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4bfd2a34de661f34d0bda43c3e4e79df586e4716ef401fe31ea39d69d581ef23", size = 289930, upload-time = "2025-09-25T19:49:11.204Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/18/884a44aa47f2a3b88dd09bc05a1e40b57878ecd111d17e5bba6f09f8bb77/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ed2e1365e31fc73f1825fa830f1c8f8917ca1b3ca6185773b349c20fd606cec2", size = 272194, upload-time = "2025-09-25T19:49:12.524Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/8f/371a3ab33c6982070b674f1788e05b656cfbf5685894acbfef0c65483a59/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:83e787d7a84dbbfba6f250dd7a5efd689e935f03dd83b0f919d39349e1f23f83", size = 269381, upload-time = "2025-09-25T19:49:14.308Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/34/7e4e6abb7a8778db6422e88b1f06eb07c47682313997ee8a8f9352e5a6f1/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:137c5156524328a24b9fac1cb5db0ba618bc97d11970b39184c1d87dc4bf1746", size = 271750, upload-time = "2025-09-25T19:49:15.584Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c0/1b/54f416be2499bd72123c70d98d36c6cd61a4e33d9b89562c22481c81bb30/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:38cac74101777a6a7d3b3e3cfefa57089b5ada650dce2baf0cbdd9d65db22a9e", size = 303757, upload-time = "2025-09-25T19:49:17.244Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/13/62/062c24c7bcf9d2826a1a843d0d605c65a755bc98002923d01fd61270705a/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:d8d65b564ec849643d9f7ea05c6d9f0cd7ca23bdd4ac0c2dbef1104ab504543d", size = 306740, upload-time = "2025-09-25T19:49:18.693Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/c8/1fdbfc8c0f20875b6b4020f3c7dc447b8de60aa0be5faaf009d24242aec9/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:741449132f64b3524e95cd30e5cd3343006ce146088f074f31ab26b94e6c75ba", size = 334197, upload-time = "2025-09-25T19:49:20.523Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/c1/8b84545382d75bef226fbc6588af0f7b7d095f7cd6a670b42a86243183cd/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:212139484ab3207b1f0c00633d3be92fef3c5f0af17cad155679d03ff2ee1e41", size = 352974, upload-time = "2025-09-25T19:49:22.254Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/a6/ffb49d4254ed085e62e3e5dd05982b4393e32fe1e49bb1130186617c29cd/bcrypt-5.0.0-cp313-cp313t-win32.whl", hash = "sha256:9d52ed507c2488eddd6a95bccee4e808d3234fa78dd370e24bac65a21212b861", size = 148498, upload-time = "2025-09-25T19:49:24.134Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/a9/259559edc85258b6d5fc5471a62a3299a6aa37a6611a169756bf4689323c/bcrypt-5.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f6984a24db30548fd39a44360532898c33528b74aedf81c26cf29c51ee47057e", size = 145853, upload-time = "2025-09-25T19:49:25.702Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2d/df/9714173403c7e8b245acf8e4be8876aac64a209d1b392af457c79e60492e/bcrypt-5.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9fffdb387abe6aa775af36ef16f55e318dcda4194ddbf82007a6f21da29de8f5", size = 139626, upload-time = "2025-09-25T19:49:26.928Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/14/c18006f91816606a4abe294ccc5d1e6f0e42304df5a33710e9e8e95416e1/bcrypt-5.0.0-cp314-cp314t-macosx_10_12_universal2.whl", hash = "sha256:4870a52610537037adb382444fefd3706d96d663ac44cbb2f37e3919dca3d7ef", size = 481862, upload-time = "2025-09-25T19:49:28.365Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/67/49/dd074d831f00e589537e07a0725cf0e220d1f0d5d8e85ad5bbff251c45aa/bcrypt-5.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48f753100931605686f74e27a7b49238122aa761a9aefe9373265b8b7aa43ea4", size = 268544, upload-time = "2025-09-25T19:49:30.39Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/91/50ccba088b8c474545b034a1424d05195d9fcbaaf802ab8bfe2be5a4e0d7/bcrypt-5.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f70aadb7a809305226daedf75d90379c397b094755a710d7014b8b117df1ebbf", size = 271787, upload-time = "2025-09-25T19:49:32.144Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/e7/d7dba133e02abcda3b52087a7eea8c0d4f64d3e593b4fffc10c31b7061f3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:744d3c6b164caa658adcb72cb8cc9ad9b4b75c7db507ab4bc2480474a51989da", size = 269753, upload-time = "2025-09-25T19:49:33.885Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/33/fc/5b145673c4b8d01018307b5c2c1fc87a6f5a436f0ad56607aee389de8ee3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a28bc05039bdf3289d757f49d616ab3efe8cf40d8e8001ccdd621cd4f98f4fc9", size = 289587, upload-time = "2025-09-25T19:49:35.144Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/d7/1ff22703ec6d4f90e62f1a5654b8867ef96bafb8e8102c2288333e1a6ca6/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:7f277a4b3390ab4bebe597800a90da0edae882c6196d3038a73adf446c4f969f", size = 272178, upload-time = "2025-09-25T19:49:36.793Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c8/88/815b6d558a1e4d40ece04a2f84865b0fef233513bd85fd0e40c294272d62/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:79cfa161eda8d2ddf29acad370356b47f02387153b11d46042e93a0a95127493", size = 269295, upload-time = "2025-09-25T19:49:38.164Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/51/8c/e0db387c79ab4931fc89827d37608c31cc57b6edc08ccd2386139028dc0d/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a5393eae5722bcef046a990b84dff02b954904c36a194f6cfc817d7dca6c6f0b", size = 271700, upload-time = "2025-09-25T19:49:39.917Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/83/1570edddd150f572dbe9fc00f6203a89fc7d4226821f67328a85c330f239/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7f4c94dec1b5ab5d522750cb059bb9409ea8872d4494fd152b53cca99f1ddd8c", size = 334034, upload-time = "2025-09-25T19:49:41.227Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/f2/ea64e51a65e56ae7a8a4ec236c2bfbdd4b23008abd50ac33fbb2d1d15424/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0cae4cb350934dfd74c020525eeae0a5f79257e8a201c0c176f4b84fdbf2a4b4", size = 352766, upload-time = "2025-09-25T19:49:43.08Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/d4/1a388d21ee66876f27d1a1f41287897d0c0f1712ef97d395d708ba93004c/bcrypt-5.0.0-cp314-cp314t-win32.whl", hash = "sha256:b17366316c654e1ad0306a6858e189fc835eca39f7eb2cafd6aaca8ce0c40a2e", size = 152449, upload-time = "2025-09-25T19:49:44.971Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3f/61/3291c2243ae0229e5bca5d19f4032cecad5dfb05a2557169d3a69dc0ba91/bcrypt-5.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:92864f54fb48b4c718fc92a32825d0e42265a627f956bc0361fe869f1adc3e7d", size = 149310, upload-time = "2025-09-25T19:49:46.162Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/89/4b01c52ae0c1a681d4021e5dd3e45b111a8fb47254a274fa9a378d8d834b/bcrypt-5.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dd19cf5184a90c873009244586396a6a884d591a5323f0e8a5922560718d4993", size = 143761, upload-time = "2025-09-25T19:49:47.345Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/29/6237f151fbfe295fe3e074ecc6d44228faa1e842a81f6d34a02937ee1736/bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b", size = 494553, upload-time = "2025-09-25T19:49:49.006Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/b6/4c1205dde5e464ea3bd88e8742e19f899c16fa8916fb8510a851fae985b5/bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb", size = 275009, upload-time = "2025-09-25T19:49:50.581Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/71/427945e6ead72ccffe77894b2655b695ccf14ae1866cd977e185d606dd2f/bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef", size = 278029, upload-time = "2025-09-25T19:49:52.533Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/72/c344825e3b83c5389a369c8a8e58ffe1480b8a699f46c127c34580c4666b/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd", size = 275907, upload-time = "2025-09-25T19:49:54.709Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/7e/d4e47d2df1641a36d1212e5c0514f5291e1a956a7749f1e595c07a972038/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd", size = 296500, upload-time = "2025-09-25T19:49:56.013Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/c3/0ae57a68be2039287ec28bc463b82e4b8dc23f9d12c0be331f4782e19108/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464", size = 278412, upload-time = "2025-09-25T19:49:57.356Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/2b/77424511adb11e6a99e3a00dcc7745034bee89036ad7d7e255a7e47be7d8/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75", size = 275486, upload-time = "2025-09-25T19:49:59.116Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/43/0a/405c753f6158e0f3f14b00b462d8bca31296f7ecfc8fc8bc7919c0c7d73a/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff", size = 277940, upload-time = "2025-09-25T19:50:00.869Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/62/83/b3efc285d4aadc1fa83db385ec64dcfa1707e890eb42f03b127d66ac1b7b/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4", size = 310776, upload-time = "2025-09-25T19:50:02.393Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/95/7d/47ee337dacecde6d234890fe929936cb03ebc4c3a7460854bbd9c97780b8/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb", size = 312922, upload-time = "2025-09-25T19:50:04.232Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/3a/43d494dfb728f55f4e1cf8fd435d50c16a2d75493225b54c8d06122523c6/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c", size = 341367, upload-time = "2025-09-25T19:50:05.559Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/55/ab/a0727a4547e383e2e22a630e0f908113db37904f58719dc48d4622139b5c/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb", size = 359187, upload-time = "2025-09-25T19:50:06.916Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1b/bb/461f352fdca663524b4643d8b09e8435b4990f17fbf4fea6bc2a90aa0cc7/bcrypt-5.0.0-cp38-abi3-win32.whl", hash = "sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538", size = 153752, upload-time = "2025-09-25T19:50:08.515Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/41/aa/4190e60921927b7056820291f56fc57d00d04757c8b316b2d3c0d1d6da2c/bcrypt-5.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9", size = 150881, upload-time = "2025-09-25T19:50:09.742Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/12/cd77221719d0b39ac0b55dbd39358db1cd1246e0282e104366ebbfb8266a/bcrypt-5.0.0-cp38-abi3-win_arm64.whl", hash = "sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980", size = 144931, upload-time = "2025-09-25T19:50:11.016Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a", size = 495313, upload-time = "2025-09-25T19:50:12.309Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/ee/2f4985dbad090ace5ad1f7dd8ff94477fe089b5fab2040bd784a3d5f187b/bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191", size = 275290, upload-time = "2025-09-25T19:50:13.673Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/6e/b77ade812672d15cf50842e167eead80ac3514f3beacac8902915417f8b7/bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254", size = 278253, upload-time = "2025-09-25T19:50:15.089Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/c4/ed00ed32f1040f7990dac7115f82273e3c03da1e1a1587a778d8cea496d8/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db", size = 276084, upload-time = "2025-09-25T19:50:16.699Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/c4/fa6e16145e145e87f1fa351bbd54b429354fd72145cd3d4e0c5157cf4c70/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac", size = 297185, upload-time = "2025-09-25T19:50:18.525Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822", size = 278656, upload-time = "2025-09-25T19:50:19.809Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/31/79f11865f8078e192847d2cb526e3fa27c200933c982c5b2869720fa5fce/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8", size = 275662, upload-time = "2025-09-25T19:50:21.567Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/8d/5e43d9584b3b3591a6f9b68f755a4da879a59712981ef5ad2a0ac1379f7a/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a", size = 278240, upload-time = "2025-09-25T19:50:23.305Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/48/44590e3fc158620f680a978aafe8f87a4c4320da81ed11552f0323aa9a57/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1", size = 311152, upload-time = "2025-09-25T19:50:24.597Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/85/e4fbfc46f14f47b0d20493669a625da5827d07e8a88ee460af6cd9768b44/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42", size = 313284, upload-time = "2025-09-25T19:50:26.268Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/ae/479f81d3f4594456a01ea2f05b132a519eff9ab5768a70430fa1132384b1/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10", size = 341643, upload-time = "2025-09-25T19:50:28.02Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/d2/36a086dee1473b14276cd6ea7f61aef3b2648710b5d7f1c9e032c29b859f/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172", size = 359698, upload-time = "2025-09-25T19:50:31.347Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c0/f6/688d2cd64bfd0b14d805ddb8a565e11ca1fb0fd6817175d58b10052b6d88/bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683", size = 153725, upload-time = "2025-09-25T19:50:34.384Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/b9/9d9a641194a730bda138b3dfe53f584d61c58cd5230e37566e83ec2ffa0d/bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2", size = 150912, upload-time = "2025-09-25T19:50:35.69Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/44/d2ef5e87509158ad2187f4dd0852df80695bb1ee0cfe0a684727b01a69e0/bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = "sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927", size = 144953, upload-time = "2025-09-25T19:50:37.32Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/75/4aa9f5a4d40d762892066ba1046000b329c7cd58e888a6db878019b282dc/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7edda91d5ab52b15636d9c30da87d2cc84f426c72b9dba7a9b4fe142ba11f534", size = 271180, upload-time = "2025-09-25T19:50:38.575Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/79/875f9558179573d40a9cc743038ac2bf67dfb79cecb1e8b5d70e88c94c3d/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:046ad6db88edb3c5ece4369af997938fb1c19d6a699b9c1b27b0db432faae4c4", size = 273791, upload-time = "2025-09-25T19:50:39.913Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/fe/975adb8c216174bf70fc17535f75e85ac06ed5252ea077be10d9cff5ce24/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dcd58e2b3a908b5ecc9b9df2f0085592506ac2d5110786018ee5e160f28e0911", size = 270746, upload-time = "2025-09-25T19:50:43.306Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/f8/972c96f5a2b6c4b3deca57009d93e946bbdbe2241dca9806d502f29dd3ee/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:6b8f520b61e8781efee73cba14e3e8c9556ccfb375623f4f97429544734545b4", size = 273375, upload-time = "2025-09-25T19:50:45.43Z" },
]
[[package]]
name = "certifi"
version = "2026.1.4"
@@ -305,7 +235,7 @@ wheels = [
[[package]]
name = "fastapi"
version = "0.135.1"
version = "0.135.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "annotated-doc" },
@@ -314,14 +244,14 @@ dependencies = [
{ name = "typing-extensions" },
{ name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e7/7b/f8e0211e9380f7195ba3f3d40c292594fd81ba8ec4629e3854c353aaca45/fastapi-0.135.1.tar.gz", hash = "sha256:d04115b508d936d254cea545b7312ecaa58a7b3a0f84952535b4c9afae7668cd", size = 394962, upload-time = "2026-03-01T18:18:29.369Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f7/e6/7adb4c5fa231e82c35b8f5741a9f2d055f520c29af5546fd70d3e8e1cd2e/fastapi-0.135.3.tar.gz", hash = "sha256:bd6d7caf1a2bdd8d676843cdcd2287729572a1ef524fc4d65c17ae002a1be654", size = 396524, upload-time = "2026-04-01T16:23:58.188Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e4/72/42e900510195b23a56bde950d26a51f8b723846bfcaa0286e90287f0422b/fastapi-0.135.1-py3-none-any.whl", hash = "sha256:46e2fc5745924b7c840f71ddd277382af29ce1cdb7d5eab5bf697e3fb9999c9e", size = 116999, upload-time = "2026-03-01T18:18:30.831Z" },
{ url = "https://files.pythonhosted.org/packages/84/a4/5caa2de7f917a04ada20018eccf60d6cc6145b0199d55ca3711b0fc08312/fastapi-0.135.3-py3-none-any.whl", hash = "sha256:9b0f590c813acd13d0ab43dd8494138eb58e484bfac405db1f3187cfc5810d98", size = 117734, upload-time = "2026-04-01T16:23:59.328Z" },
]
[[package]]
name = "fastapi-toolsets"
version = "2.3.0"
version = "3.0.3"
source = { editable = "." }
dependencies = [
{ name = "asyncpg" },
@@ -352,11 +282,12 @@ pytest = [
[package.dev-dependencies]
dev = [
{ name = "bcrypt" },
{ name = "coverage" },
{ name = "fastapi-toolsets", extra = ["all"] },
{ name = "httpx" },
{ name = "mike" },
{ name = "mkdocstrings-python" },
{ name = "prek" },
{ name = "pytest" },
{ name = "pytest-anyio" },
{ name = "pytest-cov" },
@@ -366,12 +297,10 @@ dev = [
{ name = "zensical" },
]
docs = [
{ name = "mike" },
{ name = "mkdocstrings-python" },
{ name = "zensical" },
]
docs-src = [
{ name = "bcrypt" },
]
tests = [
{ name = "coverage" },
{ name = "httpx" },
@@ -398,24 +327,25 @@ provides-extras = ["cli", "metrics", "pytest", "all"]
[package.metadata.requires-dev]
dev = [
{ name = "bcrypt", specifier = ">=4.0.0" },
{ name = "coverage", specifier = ">=7.0.0" },
{ name = "fastapi-toolsets", extras = ["all"] },
{ name = "httpx", specifier = ">=0.25.0" },
{ name = "mike", git = "https://github.com/squidfunk/mike.git?tag=2.2.0%2Bzensical-0.1.0" },
{ name = "mkdocstrings-python", specifier = ">=2.0.2" },
{ name = "prek", specifier = ">=0.3.8" },
{ name = "pytest", specifier = ">=8.0.0" },
{ name = "pytest-anyio", specifier = ">=0.0.0" },
{ name = "pytest-cov", specifier = ">=4.0.0" },
{ name = "pytest-xdist", specifier = ">=3.0.0" },
{ name = "ruff", specifier = ">=0.1.0" },
{ name = "ty", specifier = ">=0.0.1a0" },
{ name = "zensical", specifier = ">=0.0.23" },
{ name = "zensical", specifier = ">=0.0.30" },
]
docs = [
{ name = "mike", git = "https://github.com/squidfunk/mike.git?tag=2.2.0%2Bzensical-0.1.0" },
{ name = "mkdocstrings-python", specifier = ">=2.0.2" },
{ name = "zensical", specifier = ">=0.0.23" },
{ name = "zensical", specifier = ">=0.0.30" },
]
docs-src = [{ name = "bcrypt", specifier = ">=4.0.0" }]
tests = [
{ name = "coverage", specifier = ">=7.0.0" },
{ name = "httpx", specifier = ">=0.25.0" },
@@ -493,6 +423,7 @@ wheels = [
name = "griffelib"
version = "2.0.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ad/06/eccbd311c9e2b3ca45dbc063b93134c57a1ccc7607c5e545264ad092c4a9/griffelib-2.0.0.tar.gz", hash = "sha256:e504d637a089f5cab9b5daf18f7645970509bf4f53eda8d79ed71cce8bd97934", size = 166312, upload-time = "2026-03-23T21:06:55.954Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4d/51/c936033e16d12b627ea334aaaaf42229c37620d0f15593456ab69ab48161/griffelib-2.0.0-py3-none-any.whl", hash = "sha256:01284878c966508b6d6f1dbff9b6fa607bc062d8261c5c7253cb285b06422a7f", size = 142004, upload-time = "2026-02-09T19:09:40.561Z" },
]
@@ -677,6 +608,17 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/19/04f9b178c2d8a15b076c8b5140708fa6ffc5601fb6f1e975537072df5b2a/mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307", size = 6354, upload-time = "2021-02-05T18:55:29.583Z" },
]
[[package]]
name = "mike"
version = "2.2.0+zensical.0.1.0"
source = { git = "https://github.com/squidfunk/mike.git?tag=2.2.0%2Bzensical-0.1.0#0f62791256ebeba60d20d2f1d8fe6ec3b7d1e2b3" }
dependencies = [
{ name = "jinja2" },
{ name = "pyparsing" },
{ name = "verspec" },
{ name = "zensical" },
]
[[package]]
name = "mkdocs"
version = "1.6.1"
@@ -796,6 +738,30 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
]
[[package]]
name = "prek"
version = "0.3.8"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/62/ee/03e8180e3fda9de25b6480bd15cc2bde40d573868d50648b0e527b35562f/prek-0.3.8.tar.gz", hash = "sha256:434a214256516f187a3ab15f869d950243be66b94ad47987ee4281b69643a2d9", size = 400224, upload-time = "2026-03-23T08:23:35.981Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/00/84/40d2ddf362d12c4cd4a25a8c89a862edf87cdfbf1422aa41aac8e315d409/prek-0.3.8-py3-none-linux_armv6l.whl", hash = "sha256:6fb646ada60658fa6dd7771b2e0fb097f005151be222f869dada3eb26d79ed33", size = 5226646, upload-time = "2026-03-23T08:23:18.306Z" },
{ url = "https://files.pythonhosted.org/packages/e1/52/7308a033fa43b7e8e188797bd2b3b017c0f0adda70fa7af575b1f43ea888/prek-0.3.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f3d7fdadb15efc19c09953c7a33cf2061a70f367d1e1957358d3ad5cc49d0616", size = 5620104, upload-time = "2026-03-23T08:23:40.053Z" },
{ url = "https://files.pythonhosted.org/packages/ff/b1/f106ac000a91511a9cd80169868daf2f5b693480ef5232cec5517a38a512/prek-0.3.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:72728c3295e79ca443f8c1ec037d2a5b914ec73a358f69cf1bc1964511876bf8", size = 5199867, upload-time = "2026-03-23T08:23:38.066Z" },
{ url = "https://files.pythonhosted.org/packages/b3/e9/970713f4b019f69de9844e1bab37b8ddb67558e410916f4eb5869a696165/prek-0.3.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:48efc28f2f53b5b8087efca9daaed91572d62df97d5f24a1c7a087fecb5017de", size = 5441801, upload-time = "2026-03-23T08:23:32.617Z" },
{ url = "https://files.pythonhosted.org/packages/12/a4/7ef44032b181753e19452ec3b09abb3a32607cf6b0a0508f0604becaaf2b/prek-0.3.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f6ca9d63bacbc448a5c18e955c78d3ac5176c3a17c3baacdd949b1a623e08a36", size = 5155107, upload-time = "2026-03-23T08:23:31.021Z" },
{ url = "https://files.pythonhosted.org/packages/bd/77/4d9c8985dbba84149760785dfe07093ea1e29d710257dfb7c89615e2234c/prek-0.3.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1000f7029696b4fe712fb1fefd4c55b9c4de72b65509c8e50296370a06f9dc3f", size = 5566541, upload-time = "2026-03-23T08:23:45.694Z" },
{ url = "https://files.pythonhosted.org/packages/1a/1a/81e6769ac1f7f8346d09ce2ab0b47cf06466acd9ff72e87e5d1f0d98cd32/prek-0.3.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ff0bed0e2c1286522987d982168a86cbbd0d069d840506a46c9fda983515517", size = 6552991, upload-time = "2026-03-23T08:23:21.958Z" },
{ url = "https://files.pythonhosted.org/packages/6f/fa/ce2df0dd2dc75a9437a52463239d0782998943d7b04e191fb89b83016c34/prek-0.3.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fb087ac0ffda3ac65bbbae9a38326a7fd27ee007bb4a94323ce1eb539d8bbec", size = 5832972, upload-time = "2026-03-23T08:23:20.258Z" },
{ url = "https://files.pythonhosted.org/packages/18/6b/9d4269df9073216d296244595a21c253b6475dfc9076c0bd2906be7a436c/prek-0.3.8-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:2e1e5e206ff7b31bd079cce525daddc96cd6bc544d20dc128921ad92f7a4c85d", size = 5448371, upload-time = "2026-03-23T08:23:41.835Z" },
{ url = "https://files.pythonhosted.org/packages/60/1d/1e4d8a78abefa5b9d086e5a9f1638a74b5e540eec8a648d9946707701f29/prek-0.3.8-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:dcea3fe23832a4481bccb7c45f55650cb233be7c805602e788bb7dba60f2d861", size = 5270546, upload-time = "2026-03-23T08:23:24.231Z" },
{ url = "https://files.pythonhosted.org/packages/77/07/34f36551a6319ae36e272bea63a42f59d41d2d47ab0d5fb00eb7b4e88e87/prek-0.3.8-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:4d25e647e9682f6818ab5c31e7a4b842993c14782a6ffcd128d22b784e0d677f", size = 5124032, upload-time = "2026-03-23T08:23:26.368Z" },
{ url = "https://files.pythonhosted.org/packages/e3/01/6d544009bb655e709993411796af77339f439526db4f3b3509c583ad8eb9/prek-0.3.8-py3-none-musllinux_1_1_i686.whl", hash = "sha256:de528b82935e33074815acff3c7c86026754d1212136295bc88fe9c43b4231d5", size = 5432245, upload-time = "2026-03-23T08:23:47.877Z" },
{ url = "https://files.pythonhosted.org/packages/54/96/1237ee269e9bfa283ffadbcba1f401f48a47aed2b2563eb1002740d6079d/prek-0.3.8-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:6d660f1c25a126e6d9f682fe61449441226514f412a4469f5d71f8f8cad56db2", size = 5950550, upload-time = "2026-03-23T08:23:43.8Z" },
{ url = "https://files.pythonhosted.org/packages/ca/6b/a574411459049bc691047c9912f375deda10c44a707b6ce98df2b658f0b3/prek-0.3.8-py3-none-win32.whl", hash = "sha256:b0c291c577615d9f8450421dff0b32bfd77a6b0d223ee4115a1f820cb636fdf1", size = 4949501, upload-time = "2026-03-23T08:23:16.338Z" },
{ url = "https://files.pythonhosted.org/packages/0c/b4/46b59fe49f635acd9f6530778ce577f9d8b49452835726a5311ffc902c67/prek-0.3.8-py3-none-win_amd64.whl", hash = "sha256:bc147fdbdd4ec33fc7a987b893ecb69b1413ac100d95c9889a70f3fd58c73d06", size = 5346551, upload-time = "2026-03-23T08:23:34.501Z" },
{ url = "https://files.pythonhosted.org/packages/53/05/9cca1708bb8c65264124eb4b04251e0f65ce5bfc707080bb6b492d5a0df7/prek-0.3.8-py3-none-win_arm64.whl", hash = "sha256:a2614647aeafa817a5802ccb9561e92eedc20dcf840639a1b00826e2c2442515", size = 5190872, upload-time = "2026-03-23T08:23:29.463Z" },
]
[[package]]
name = "prometheus-client"
version = "0.24.1"
@@ -919,24 +885,33 @@ wheels = [
[[package]]
name = "pygments"
version = "2.19.2"
version = "2.20.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
{ url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" },
]
[[package]]
name = "pymdown-extensions"
version = "10.21"
version = "10.21.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "markdown" },
{ name = "pyyaml" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ba/63/06673d1eb6d8f83c0ea1f677d770e12565fb516928b4109c9e2055656a9e/pymdown_extensions-10.21.tar.gz", hash = "sha256:39f4a020f40773f6b2ff31d2cd2546c2c04d0a6498c31d9c688d2be07e1767d5", size = 853363, upload-time = "2026-02-15T20:44:06.748Z" }
sdist = { url = "https://files.pythonhosted.org/packages/df/08/f1c908c581fd11913da4711ea7ba32c0eee40b0190000996bb863b0c9349/pymdown_extensions-10.21.2.tar.gz", hash = "sha256:c3f55a5b8a1d0edf6699e35dcbea71d978d34ff3fa79f3d807b8a5b3fa90fbdc", size = 853922, upload-time = "2026-03-29T15:01:55.233Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6f/2c/5b079febdc65e1c3fb2729bf958d18b45be7113828528e8a0b5850dd819a/pymdown_extensions-10.21-py3-none-any.whl", hash = "sha256:91b879f9f864d49794c2d9534372b10150e6141096c3908a455e45ca72ad9d3f", size = 268877, upload-time = "2026-02-15T20:44:05.464Z" },
{ url = "https://files.pythonhosted.org/packages/f7/27/a2fc51a4a122dfd1015e921ae9d22fee3d20b0b8080d9a704578bf9deece/pymdown_extensions-10.21.2-py3-none-any.whl", hash = "sha256:5c0fd2a2bea14eb39af8ff284f1066d898ab2187d81b889b75d46d4348c01638", size = 268901, upload-time = "2026-03-29T15:01:53.244Z" },
]
[[package]]
name = "pyparsing"
version = "3.3.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f3/91/9c6ee907786a473bf81c5f53cf703ba0957b23ab84c264080fb5a450416f/pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc", size = 6851574, upload-time = "2026-01-21T03:57:59.36Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" },
]
[[package]]
@@ -970,16 +945,16 @@ wheels = [
[[package]]
name = "pytest-cov"
version = "7.0.0"
version = "7.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "coverage", extra = ["toml"] },
{ name = "pluggy" },
{ name = "pytest" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" }
sdist = { url = "https://files.pythonhosted.org/packages/b1/51/a849f96e117386044471c8ec2bd6cfebacda285da9525c9106aeb28da671/pytest_cov-7.1.0.tar.gz", hash = "sha256:30674f2b5f6351aa09702a9c8c364f6a01c27aae0c1366ae8016160d1efc56b2", size = 55592, upload-time = "2026-03-21T20:11:16.284Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
{ url = "https://files.pythonhosted.org/packages/9d/7a/d968e294073affff457b041c2be9868a40c1c71f4a35fcc1e45e5493067b/pytest_cov-7.1.0-py3-none-any.whl", hash = "sha256:a0461110b7865f9a271aa1b51e516c9a95de9d696734a2f71e3e78f46e1d4678", size = 22876, upload-time = "2026-03-21T20:11:14.438Z" },
]
[[package]]
@@ -1089,27 +1064,27 @@ wheels = [
[[package]]
name = "ruff"
version = "0.15.6"
version = "0.15.8"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/51/df/f8629c19c5318601d3121e230f74cbee7a3732339c52b21daa2b82ef9c7d/ruff-0.15.6.tar.gz", hash = "sha256:8394c7bb153a4e3811a4ecdacd4a8e6a4fa8097028119160dffecdcdf9b56ae4", size = 4597916, upload-time = "2026-03-12T23:05:47.51Z" }
sdist = { url = "https://files.pythonhosted.org/packages/14/b0/73cf7550861e2b4824950b8b52eebdcc5adc792a00c514406556c5b80817/ruff-0.15.8.tar.gz", hash = "sha256:995f11f63597ee362130d1d5a327a87cb6f3f5eae3094c620bcc632329a4d26e", size = 4610921, upload-time = "2026-03-26T18:39:38.675Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9e/2f/4e03a7e5ce99b517e98d3b4951f411de2b0fa8348d39cf446671adcce9a2/ruff-0.15.6-py3-none-linux_armv6l.whl", hash = "sha256:7c98c3b16407b2cf3d0f2b80c80187384bc92c6774d85fefa913ecd941256fff", size = 10508953, upload-time = "2026-03-12T23:05:17.246Z" },
{ url = "https://files.pythonhosted.org/packages/70/60/55bcdc3e9f80bcf39edf0cd272da6fa511a3d94d5a0dd9e0adf76ceebdb4/ruff-0.15.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ee7dcfaad8b282a284df4aa6ddc2741b3f4a18b0555d626805555a820ea181c3", size = 10942257, upload-time = "2026-03-12T23:05:23.076Z" },
{ url = "https://files.pythonhosted.org/packages/e7/f9/005c29bd1726c0f492bfa215e95154cf480574140cb5f867c797c18c790b/ruff-0.15.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3bd9967851a25f038fc8b9ae88a7fbd1b609f30349231dffaa37b6804923c4bb", size = 10322683, upload-time = "2026-03-12T23:05:33.738Z" },
{ url = "https://files.pythonhosted.org/packages/5f/74/2f861f5fd7cbb2146bddb5501450300ce41562da36d21868c69b7a828169/ruff-0.15.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13f4594b04e42cd24a41da653886b04d2ff87adbf57497ed4f728b0e8a4866f8", size = 10660986, upload-time = "2026-03-12T23:05:53.245Z" },
{ url = "https://files.pythonhosted.org/packages/c1/a1/309f2364a424eccb763cdafc49df843c282609f47fe53aa83f38272389e0/ruff-0.15.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e2ed8aea2f3fe57886d3f00ea5b8aae5bf68d5e195f487f037a955ff9fbaac9e", size = 10332177, upload-time = "2026-03-12T23:05:56.145Z" },
{ url = "https://files.pythonhosted.org/packages/30/41/7ebf1d32658b4bab20f8ac80972fb19cd4e2c6b78552be263a680edc55ac/ruff-0.15.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70789d3e7830b848b548aae96766431c0dc01a6c78c13381f423bf7076c66d15", size = 11170783, upload-time = "2026-03-12T23:06:01.742Z" },
{ url = "https://files.pythonhosted.org/packages/76/be/6d488f6adca047df82cd62c304638bcb00821c36bd4881cfca221561fdfc/ruff-0.15.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:542aaf1de3154cea088ced5a819ce872611256ffe2498e750bbae5247a8114e9", size = 12044201, upload-time = "2026-03-12T23:05:28.697Z" },
{ url = "https://files.pythonhosted.org/packages/71/68/e6f125df4af7e6d0b498f8d373274794bc5156b324e8ab4bf5c1b4fc0ec7/ruff-0.15.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c22e6f02c16cfac3888aa636e9eba857254d15bbacc9906c9689fdecb1953ab", size = 11421561, upload-time = "2026-03-12T23:05:31.236Z" },
{ url = "https://files.pythonhosted.org/packages/f1/9f/f85ef5fd01a52e0b472b26dc1b4bd228b8f6f0435975442ffa4741278703/ruff-0.15.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98893c4c0aadc8e448cfa315bd0cc343a5323d740fe5f28ef8a3f9e21b381f7e", size = 11310928, upload-time = "2026-03-12T23:05:45.288Z" },
{ url = "https://files.pythonhosted.org/packages/8c/26/b75f8c421f5654304b89471ed384ae8c7f42b4dff58fa6ce1626d7f2b59a/ruff-0.15.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:70d263770d234912374493e8cc1e7385c5d49376e41dfa51c5c3453169dc581c", size = 11235186, upload-time = "2026-03-12T23:05:50.677Z" },
{ url = "https://files.pythonhosted.org/packages/fc/d4/d5a6d065962ff7a68a86c9b4f5500f7d101a0792078de636526c0edd40da/ruff-0.15.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:55a1ad63c5a6e54b1f21b7514dfadc0c7fb40093fa22e95143cf3f64ebdcd512", size = 10635231, upload-time = "2026-03-12T23:05:37.044Z" },
{ url = "https://files.pythonhosted.org/packages/d6/56/7c3acf3d50910375349016cf33de24be021532042afbed87942858992491/ruff-0.15.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8dc473ba093c5ec238bb1e7429ee676dca24643c471e11fbaa8a857925b061c0", size = 10340357, upload-time = "2026-03-12T23:06:04.748Z" },
{ url = "https://files.pythonhosted.org/packages/06/54/6faa39e9c1033ff6a3b6e76b5df536931cd30caf64988e112bbf91ef5ce5/ruff-0.15.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:85b042377c2a5561131767974617006f99f7e13c63c111b998f29fc1e58a4cfb", size = 10860583, upload-time = "2026-03-12T23:05:58.978Z" },
{ url = "https://files.pythonhosted.org/packages/cb/1e/509a201b843b4dfb0b32acdedf68d951d3377988cae43949ba4c4133a96a/ruff-0.15.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cef49e30bc5a86a6a92098a7fbf6e467a234d90b63305d6f3ec01225a9d092e0", size = 11410976, upload-time = "2026-03-12T23:05:39.955Z" },
{ url = "https://files.pythonhosted.org/packages/6c/25/3fc9114abf979a41673ce877c08016f8e660ad6cf508c3957f537d2e9fa9/ruff-0.15.6-py3-none-win32.whl", hash = "sha256:bbf67d39832404812a2d23020dda68fee7f18ce15654e96fb1d3ad21a5fe436c", size = 10616872, upload-time = "2026-03-12T23:05:42.451Z" },
{ url = "https://files.pythonhosted.org/packages/89/7a/09ece68445ceac348df06e08bf75db72d0e8427765b96c9c0ffabc1be1d9/ruff-0.15.6-py3-none-win_amd64.whl", hash = "sha256:aee25bc84c2f1007ecb5037dff75cef00414fdf17c23f07dc13e577883dca406", size = 11787271, upload-time = "2026-03-12T23:05:20.168Z" },
{ url = "https://files.pythonhosted.org/packages/7f/d0/578c47dd68152ddddddf31cd7fc67dc30b7cdf639a86275fda821b0d9d98/ruff-0.15.6-py3-none-win_arm64.whl", hash = "sha256:c34de3dd0b0ba203be50ae70f5910b17188556630e2178fd7d79fc030eb0d837", size = 11060497, upload-time = "2026-03-12T23:05:25.968Z" },
{ url = "https://files.pythonhosted.org/packages/4a/92/c445b0cd6da6e7ae51e954939cb69f97e008dbe750cfca89b8cedc081be7/ruff-0.15.8-py3-none-linux_armv6l.whl", hash = "sha256:cbe05adeba76d58162762d6b239c9056f1a15a55bd4b346cfd21e26cd6ad7bc7", size = 10527394, upload-time = "2026-03-26T18:39:41.566Z" },
{ url = "https://files.pythonhosted.org/packages/eb/92/f1c662784d149ad1414cae450b082cf736430c12ca78367f20f5ed569d65/ruff-0.15.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d3e3d0b6ba8dca1b7ef9ab80a28e840a20070c4b62e56d675c24f366ef330570", size = 10905693, upload-time = "2026-03-26T18:39:30.364Z" },
{ url = "https://files.pythonhosted.org/packages/ca/f2/7a631a8af6d88bcef997eb1bf87cc3da158294c57044aafd3e17030613de/ruff-0.15.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6ee3ae5c65a42f273f126686353f2e08ff29927b7b7e203b711514370d500de3", size = 10323044, upload-time = "2026-03-26T18:39:33.37Z" },
{ url = "https://files.pythonhosted.org/packages/67/18/1bf38e20914a05e72ef3b9569b1d5c70a7ef26cd188d69e9ca8ef588d5bf/ruff-0.15.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdce027ada77baa448077ccc6ebb2fa9c3c62fd110d8659d601cf2f475858d94", size = 10629135, upload-time = "2026-03-26T18:39:44.142Z" },
{ url = "https://files.pythonhosted.org/packages/d2/e9/138c150ff9af60556121623d41aba18b7b57d95ac032e177b6a53789d279/ruff-0.15.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:12e617fc01a95e5821648a6df341d80456bd627bfab8a829f7cfc26a14a4b4a3", size = 10348041, upload-time = "2026-03-26T18:39:52.178Z" },
{ url = "https://files.pythonhosted.org/packages/02/f1/5bfb9298d9c323f842c5ddeb85f1f10ef51516ac7a34ba446c9347d898df/ruff-0.15.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:432701303b26416d22ba696c39f2c6f12499b89093b61360abc34bcc9bf07762", size = 11121987, upload-time = "2026-03-26T18:39:55.195Z" },
{ url = "https://files.pythonhosted.org/packages/10/11/6da2e538704e753c04e8d86b1fc55712fdbdcc266af1a1ece7a51fff0d10/ruff-0.15.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d910ae974b7a06a33a057cb87d2a10792a3b2b3b35e33d2699fdf63ec8f6b17a", size = 11951057, upload-time = "2026-03-26T18:39:19.18Z" },
{ url = "https://files.pythonhosted.org/packages/83/f0/c9208c5fd5101bf87002fed774ff25a96eea313d305f1e5d5744698dc314/ruff-0.15.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2033f963c43949d51e6fdccd3946633c6b37c484f5f98c3035f49c27395a8ab8", size = 11464613, upload-time = "2026-03-26T18:40:06.301Z" },
{ url = "https://files.pythonhosted.org/packages/f8/22/d7f2fabdba4fae9f3b570e5605d5eb4500dcb7b770d3217dca4428484b17/ruff-0.15.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f29b989a55572fb885b77464cf24af05500806ab4edf9a0fd8977f9759d85b1", size = 11257557, upload-time = "2026-03-26T18:39:57.972Z" },
{ url = "https://files.pythonhosted.org/packages/71/8c/382a9620038cf6906446b23ce8632ab8c0811b8f9d3e764f58bedd0c9a6f/ruff-0.15.8-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:ac51d486bf457cdc985a412fb1801b2dfd1bd8838372fc55de64b1510eff4bec", size = 11169440, upload-time = "2026-03-26T18:39:22.205Z" },
{ url = "https://files.pythonhosted.org/packages/4d/0d/0994c802a7eaaf99380085e4e40c845f8e32a562e20a38ec06174b52ef24/ruff-0.15.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c9861eb959edab053c10ad62c278835ee69ca527b6dcd72b47d5c1e5648964f6", size = 10605963, upload-time = "2026-03-26T18:39:46.682Z" },
{ url = "https://files.pythonhosted.org/packages/19/aa/d624b86f5b0aad7cef6bbf9cd47a6a02dfdc4f72c92a337d724e39c9d14b/ruff-0.15.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8d9a5b8ea13f26ae90838afc33f91b547e61b794865374f114f349e9036835fb", size = 10357484, upload-time = "2026-03-26T18:39:49.176Z" },
{ url = "https://files.pythonhosted.org/packages/35/c3/e0b7835d23001f7d999f3895c6b569927c4d39912286897f625736e1fd04/ruff-0.15.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c2a33a529fb3cbc23a7124b5c6ff121e4d6228029cba374777bd7649cc8598b8", size = 10830426, upload-time = "2026-03-26T18:40:03.702Z" },
{ url = "https://files.pythonhosted.org/packages/f0/51/ab20b322f637b369383adc341d761eaaa0f0203d6b9a7421cd6e783d81b9/ruff-0.15.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:75e5cd06b1cf3f47a3996cfc999226b19aa92e7cce682dcd62f80d7035f98f49", size = 11345125, upload-time = "2026-03-26T18:39:27.799Z" },
{ url = "https://files.pythonhosted.org/packages/37/e6/90b2b33419f59d0f2c4c8a48a4b74b460709a557e8e0064cf33ad894f983/ruff-0.15.8-py3-none-win32.whl", hash = "sha256:bc1f0a51254ba21767bfa9a8b5013ca8149dcf38092e6a9eb704d876de94dc34", size = 10571959, upload-time = "2026-03-26T18:39:36.117Z" },
{ url = "https://files.pythonhosted.org/packages/1f/a2/ef467cb77099062317154c63f234b8a7baf7cb690b99af760c5b68b9ee7f/ruff-0.15.8-py3-none-win_amd64.whl", hash = "sha256:04f79eff02a72db209d47d665ba7ebcad609d8918a134f86cb13dd132159fc89", size = 11743893, upload-time = "2026-03-26T18:39:25.01Z" },
{ url = "https://files.pythonhosted.org/packages/15/e2/77be4fff062fa78d9b2a4dea85d14785dac5f1d0c1fb58ed52331f0ebe28/ruff-0.15.8-py3-none-win_arm64.whl", hash = "sha256:cf891fa8e3bb430c0e7fac93851a5978fc99c8fa2c053b57b118972866f8e5f2", size = 11048175, upload-time = "2026-03-26T18:40:01.06Z" },
]
[[package]]
@@ -1253,26 +1228,26 @@ wheels = [
[[package]]
name = "ty"
version = "0.0.23"
version = "0.0.27"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/75/ba/d3c998ff4cf6b5d75b39356db55fe1b7caceecc522b9586174e6a5dee6f7/ty-0.0.23.tar.gz", hash = "sha256:5fb05db58f202af366f80ef70f806e48f5237807fe424ec787c9f289e3f3a4ef", size = 5341461, upload-time = "2026-03-13T12:34:23.125Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f4/de/e5cf1f151cf52fe1189e42d03d90909d7d1354fdc0c1847cbb63a0baa3da/ty-0.0.27.tar.gz", hash = "sha256:d7a8de3421d92420b40c94fe7e7d4816037560621903964dd035cf9bd0204a73", size = 5424130, upload-time = "2026-03-31T19:07:20.806Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f4/21/aab32603dfdfacd4819e52fa8c6074e7bd578218a5142729452fc6a62db6/ty-0.0.23-py3-none-linux_armv6l.whl", hash = "sha256:e810eef1a5f1cfc0731a58af8d2f334906a96835829767aed00026f1334a8dd7", size = 10329096, upload-time = "2026-03-13T12:34:09.432Z" },
{ url = "https://files.pythonhosted.org/packages/9f/a9/dd3287a82dce3df546ec560296208d4905dcf06346b6e18c2f3c63523bd1/ty-0.0.23-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e43d36bd89a151ddcad01acaeff7dcc507cb73ff164c1878d2d11549d39a061c", size = 10156631, upload-time = "2026-03-13T12:34:53.122Z" },
{ url = "https://files.pythonhosted.org/packages/0f/01/3f25909b02fac29bb0a62b2251f8d62e65d697781ffa4cf6b47a4c075c85/ty-0.0.23-py3-none-macosx_11_0_arm64.whl", hash = "sha256:bd6a340969577b4645f231572c4e46012acba2d10d4c0c6570fe1ab74e76ae00", size = 9653211, upload-time = "2026-03-13T12:34:15.049Z" },
{ url = "https://files.pythonhosted.org/packages/d5/60/bfc0479572a6f4b90501c869635faf8d84c8c68ffc5dd87d04f049affabc/ty-0.0.23-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:341441783e626eeb7b1ec2160432956aed5734932ab2d1c26f94d0c98b229937", size = 10156143, upload-time = "2026-03-13T12:34:34.468Z" },
{ url = "https://files.pythonhosted.org/packages/3a/81/8a93e923535a340f54bea20ff196f6b2787782b2f2f399bd191c4bc132d6/ty-0.0.23-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ce1dc66c26d4167e2c78d12fa870ef5a7ec9cc344d2baaa6243297cfa88bd52", size = 10136632, upload-time = "2026-03-13T12:34:28.832Z" },
{ url = "https://files.pythonhosted.org/packages/da/cb/2ac81c850c58acc9f976814404d28389c9c1c939676e32287b9cff61381e/ty-0.0.23-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bae1e7a294bf8528836f7617dc5c360ea2dddb63789fc9471ae6753534adca05", size = 10655025, upload-time = "2026-03-13T12:34:37.105Z" },
{ url = "https://files.pythonhosted.org/packages/b5/9b/bac771774c198c318ae699fc013d8cd99ed9caf993f661fba11238759244/ty-0.0.23-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d2b162768764d9dc177c83fb497a51532bb67cbebe57b8fa0f2668436bf53f3c", size = 11230107, upload-time = "2026-03-13T12:34:20.751Z" },
{ url = "https://files.pythonhosted.org/packages/14/09/7644fb0e297265e18243f878aca343593323b9bb19ed5278dcbc63781be0/ty-0.0.23-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d28384e48ca03b34e4e2beee0e230c39bbfb68994bb44927fec61ef3642900da", size = 10934177, upload-time = "2026-03-13T12:34:17.904Z" },
{ url = "https://files.pythonhosted.org/packages/18/14/69a25a0cad493fb6a947302471b579a03516a3b00e7bece77fdc6b4afb9b/ty-0.0.23-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:559d9a299df793cb7a7902caed5eda8a720ff69164c31c979673e928f02251ee", size = 10752487, upload-time = "2026-03-13T12:34:31.785Z" },
{ url = "https://files.pythonhosted.org/packages/9d/2a/42fc3cbccf95af0a62308ebed67e084798ab7a85ef073c9986ef18032743/ty-0.0.23-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:32a7b8a14a98e1d20a9d8d2af23637ed7efdb297ac1fa2450b8e465d05b94482", size = 10133007, upload-time = "2026-03-13T12:34:42.838Z" },
{ url = "https://files.pythonhosted.org/packages/e1/69/307833f1b52fa3670e0a1d496e43ef7df556ecde838192d3fcb9b35e360d/ty-0.0.23-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:6f803b9b9cca87af793467973b9abdd4b83e6b96d9b5e749d662cff7ead70b6d", size = 10169698, upload-time = "2026-03-13T12:34:12.351Z" },
{ url = "https://files.pythonhosted.org/packages/89/ae/5dd379ec22d0b1cba410d7af31c366fcedff191d5b867145913a64889f66/ty-0.0.23-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4a0bf086ec8e2197b7ea7ebfcf4be36cb6a52b235f8be61647ef1b2d99d6ffd3", size = 10346080, upload-time = "2026-03-13T12:34:40.012Z" },
{ url = "https://files.pythonhosted.org/packages/98/c7/dfc83203d37998620bba9c4873a080c8850a784a8a46f56f8163c5b4e320/ty-0.0.23-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:252539c3fcd7aeb9b8d5c14e2040682c3e1d7ff640906d63fd2c4ce35865a4ba", size = 10848162, upload-time = "2026-03-13T12:34:45.421Z" },
{ url = "https://files.pythonhosted.org/packages/89/08/05481511cfbcc1fd834b6c67aaae090cb609a079189ddf2032139ccfc490/ty-0.0.23-py3-none-win32.whl", hash = "sha256:51b591d19eef23bbc3807aef77d38fa1f003c354e1da908aa80ea2dca0993f77", size = 9748283, upload-time = "2026-03-13T12:34:50.607Z" },
{ url = "https://files.pythonhosted.org/packages/31/2e/eaed4ff5c85e857a02415084c394e02c30476b65e158eec1938fdaa9a205/ty-0.0.23-py3-none-win_amd64.whl", hash = "sha256:1e137e955f05c501cfbb81dd2190c8fb7d01ec037c7e287024129c722a83c9ad", size = 10698355, upload-time = "2026-03-13T12:34:26.134Z" },
{ url = "https://files.pythonhosted.org/packages/91/29/b32cb7b4c7d56b9ed50117f8ad6e45834aec293e4cb14749daab4e9236d5/ty-0.0.23-py3-none-win_arm64.whl", hash = "sha256:a0399bd13fd2cd6683fd0a2d59b9355155d46546d8203e152c556ddbdeb20842", size = 10155890, upload-time = "2026-03-13T12:34:48.082Z" },
{ url = "https://files.pythonhosted.org/packages/fa/20/2a9ea661758bd67f2bfd54ce9daacb5a26c56c5f8b49fbd9a43b365a8a7d/ty-0.0.27-py3-none-linux_armv6l.whl", hash = "sha256:eb14456b8611c9e8287aa9b633f4d2a0d9f3082a31796969e0b50bdda8930281", size = 10571211, upload-time = "2026-03-31T19:07:23.28Z" },
{ url = "https://files.pythonhosted.org/packages/da/b2/8887a51f705d075ddbe78ae7f0d4755ef48d0a90235f67aee289e9cee950/ty-0.0.27-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:02e662184703db7586118df611cf24a000d35dae38d950053d1dd7b6736fd2c4", size = 10427576, upload-time = "2026-03-31T19:07:15.499Z" },
{ url = "https://files.pythonhosted.org/packages/1d/c3/79d88163f508fb709ce19bc0b0a66c7c64b53d372d4caa56172c3d9b3ae8/ty-0.0.27-py3-none-macosx_11_0_arm64.whl", hash = "sha256:be5fc2899441f7f8f7ef40f9ffd006075a5ff6b06c44e8d2aa30e1b900c12f51", size = 9870359, upload-time = "2026-03-31T19:07:36.852Z" },
{ url = "https://files.pythonhosted.org/packages/dc/4d/ed1b0db0e1e46b5ed4976bbfe0d1825faf003b4e3774ef28c785ed73e4bb/ty-0.0.27-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30231e652b14742a76b64755e54bf0cb1cd4c128bcaf625222e0ca92a2094887", size = 10380488, upload-time = "2026-03-31T19:07:31.268Z" },
{ url = "https://files.pythonhosted.org/packages/b1/f2/20372f6d510b01570028433064880adec2f8abe68bf0c4603be61a560bef/ty-0.0.27-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a119b1168f64261b3205a37e40b5b6c4aac8fd58e4587988f4e4b22c3c79847", size = 10390248, upload-time = "2026-03-31T19:07:28.345Z" },
{ url = "https://files.pythonhosted.org/packages/45/4b/46b31a7311306be1a560f7f20fdc37b5bf718787f60626cd265d9b637554/ty-0.0.27-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e38f4e187b6975d2cbebf0f1eb1221f8f64f6e509bad14d7bb2a91afc97e4956", size = 10878479, upload-time = "2026-03-31T19:07:39.393Z" },
{ url = "https://files.pythonhosted.org/packages/42/ba/5231a2a1fb1cebe053a25de8fded95e1a30a1e77d3628a9e58487297bafc/ty-0.0.27-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a07b1a8fbb23844f6d22091275430d9ac617175f34aa99159b268193de210389", size = 11461232, upload-time = "2026-03-31T19:07:02.518Z" },
{ url = "https://files.pythonhosted.org/packages/c3/37/558abab3e1f6670493524f61280b4dfcc3219555f13889223e733381dfab/ty-0.0.27-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d3ec4033031f240836bb0337274bac5c49dde312c7c6d7575451ed719bf8ffa3", size = 11133002, upload-time = "2026-03-31T19:07:18.371Z" },
{ url = "https://files.pythonhosted.org/packages/32/38/188c14a57f52160407ce62c6abb556011718fd0bcbe1dca690529ce84c46/ty-0.0.27-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:924a8849afd500d260bf5b7296165a05b7424fbb6b19113f30f3b999d682873f", size = 10986624, upload-time = "2026-03-31T19:07:13.066Z" },
{ url = "https://files.pythonhosted.org/packages/9f/f1/667a71393f47d2cd6ba9ed07541b8df3eb63aab1f2ee658e77d91b8362fa/ty-0.0.27-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d8270026c07e7423a1b3a3fd065b46ed1478748f0662518b523b57744f3fa025", size = 10366721, upload-time = "2026-03-31T19:07:00.131Z" },
{ url = "https://files.pythonhosted.org/packages/8b/aa/8edafe41be898bda774249abc5be6edd733e53fb1777d59ea9331e38537d/ty-0.0.27-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e26e9735d3bdfd95d881111ad1cf570eab8188d8c3be36d6bcaad044d38984d8", size = 10412239, upload-time = "2026-03-31T19:07:05.297Z" },
{ url = "https://files.pythonhosted.org/packages/53/ff/8bafaed4a18d38264f46bdfc427de7ea2974cf9064e4e0bdb1b6e6c724e3/ty-0.0.27-py3-none-musllinux_1_2_i686.whl", hash = "sha256:7c09cc9a699810609acc0090af8d0db68adaee6e60a7c3e05ab80cc954a83db7", size = 10573507, upload-time = "2026-03-31T19:06:57.064Z" },
{ url = "https://files.pythonhosted.org/packages/16/2e/63a8284a2fefd08ab56ecbad0fde7dd4b2d4045a31cf24c1d1fcd9643227/ty-0.0.27-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:2d3e02853bb037221a456e034b1898aaa573e6374fbb53884e33cb7513ccb85a", size = 11090233, upload-time = "2026-03-31T19:07:34.139Z" },
{ url = "https://files.pythonhosted.org/packages/14/d3/d6fa1cafdfa2b34dbfa304fc6833af8e1669fc34e24d214fa76d2a2e5a25/ty-0.0.27-py3-none-win32.whl", hash = "sha256:34e7377f2047c14dbbb7bf5322e84114db7a5f2cb470db6bee63f8f3550cfc1e", size = 9984415, upload-time = "2026-03-31T19:07:07.98Z" },
{ url = "https://files.pythonhosted.org/packages/85/e6/dd4e27da9632b3472d5711ca49dbd3709dbd3e8c73f3af6db9c254235ca9/ty-0.0.27-py3-none-win_amd64.whl", hash = "sha256:3f7e4145aad8b815ed69b324c93b5b773eb864dda366ca16ab8693ff88ce6f36", size = 10961535, upload-time = "2026-03-31T19:07:10.566Z" },
{ url = "https://files.pythonhosted.org/packages/0e/1a/824b3496d66852ed7d5d68d9787711131552b68dce8835ce9410db32e618/ty-0.0.27-py3-none-win_arm64.whl", hash = "sha256:95bf8d01eb96bb2ba3ffc39faff19da595176448e80871a7b362f4d2de58476c", size = 10376689, upload-time = "2026-03-31T19:07:25.732Z" },
]
[[package]]
@@ -1311,6 +1286,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
]
[[package]]
name = "verspec"
version = "0.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e7/44/8126f9f0c44319b2efc65feaad589cadef4d77ece200ae3c9133d58464d0/verspec-0.1.0.tar.gz", hash = "sha256:c4504ca697b2056cdb4bfa7121461f5a0e81809255b41c03dda4ba823637c01e", size = 27123, upload-time = "2020-11-30T02:24:09.646Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a4/ce/3b6fee91c85626eaf769d617f1be9d2e15c1cca027bbdeb2e0d751469355/verspec-0.1.0-py3-none-any.whl", hash = "sha256:741877d5633cc9464c45a469ae2a31e801e6dbbaa85b9675d481cda100f11c31", size = 19640, upload-time = "2020-11-30T02:24:08.387Z" },
]
[[package]]
name = "watchdog"
version = "6.0.0"
@@ -1340,7 +1324,7 @@ wheels = [
[[package]]
name = "zensical"
version = "0.0.27"
version = "0.0.31"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
@@ -1350,18 +1334,18 @@ dependencies = [
{ name = "pymdown-extensions" },
{ name = "pyyaml" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8f/83/969152d927b522a0fed1f20b1730575d86b920ce51530b669d9fad4537de/zensical-0.0.27.tar.gz", hash = "sha256:6d8d74aba4a9f9505e6ba1c43d4c828ba4ff7bb1ff9b005e5174c5b92cf23419", size = 3841776, upload-time = "2026-03-13T17:56:14.494Z" }
sdist = { url = "https://files.pythonhosted.org/packages/d5/1a/9b6f5285c5aef648db38f9132f49a7059bd2c9d748f68ef0c52ed8afcff3/zensical-0.0.31.tar.gz", hash = "sha256:9c12f07bde70c4bfdb13d6cae1bedf8d18064d257a6e81128a152502b28a8fc3", size = 3891758, upload-time = "2026-04-01T11:30:21.88Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d8/fe/0335f1a521eb6c0ab96028bf67148390eb1d5c742c23e6a4b0f8381508bd/zensical-0.0.27-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:d51ebf4b038f3eea99fd337119b99d92ad92bbe674372d5262e6dbbabbe4e9b5", size = 12262017, upload-time = "2026-03-13T17:55:36.403Z" },
{ url = "https://files.pythonhosted.org/packages/02/cb/ac24334fc7959b49496c97cb9d2bed82a8db8b84eafaf68189048e7fe69a/zensical-0.0.27-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:a627cd4599cf2c5a5a5205f0510667227d1fe4579b6f7445adba2d84bab9fbc8", size = 12147361, upload-time = "2026-03-13T17:55:39.736Z" },
{ url = "https://files.pythonhosted.org/packages/a2/0f/31c981f61006fdaf0460d15bde1248a045178d67307bad61a4588414855d/zensical-0.0.27-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99cbc493022f8749504ef10c71772d360b705b4e2fd1511421393157d07bdccf", size = 12505771, upload-time = "2026-03-13T17:55:42.993Z" },
{ url = "https://files.pythonhosted.org/packages/30/1e/f6842c94ec89e5e9184f407dbbab2a497b444b28d4fb5b8df631894be896/zensical-0.0.27-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ecc20a85e8a23ad9ab809b2f268111321be7b2e214021b3b00f138936a87a434", size = 12455689, upload-time = "2026-03-13T17:55:46.055Z" },
{ url = "https://files.pythonhosted.org/packages/4c/ad/866c3336381cca7528e792469958fbe2e65b9206a2657bef3dd8ed4ac88b/zensical-0.0.27-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da11e0f0861dbd7d3b5e6fe1e3a53b361b2181c53f3abe9fb4cdf2ed0cea47bf", size = 12791263, upload-time = "2026-03-13T17:55:49.193Z" },
{ url = "https://files.pythonhosted.org/packages/e5/df/fca5ed6bebdb61aa656dfa65cce4b4d03324a79c75857728230872fbdf7c/zensical-0.0.27-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e11d220181477040a4b22bf2b8678d5b0c878e7aae194fad4133561cb976d69", size = 12549796, upload-time = "2026-03-13T17:55:52.55Z" },
{ url = "https://files.pythonhosted.org/packages/4a/e2/43398b5ec64ed78204a5a5929a3990769fc0f6a3094a30395882bda1399a/zensical-0.0.27-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:06b9e308aec8c5db1cd623e2e98e1b25c3f5cab6b25fcc9bac1e16c0c2b93837", size = 12683568, upload-time = "2026-03-13T17:55:56.151Z" },
{ url = "https://files.pythonhosted.org/packages/b3/3c/5c98f9964c7e30735aacd22a389dacec12bcc5bc8162c58e76b76d20db6e/zensical-0.0.27-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:682085155126965b091cb9f915cd2e4297383ac500122fd4b632cf4511733eb2", size = 12725214, upload-time = "2026-03-13T17:55:59.286Z" },
{ url = "https://files.pythonhosted.org/packages/50/0f/ebaa159cac6d64b53bf7134420c2b43399acc7096cb79795be4fb10768fc/zensical-0.0.27-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:b367c285157c8e1099ae9e2b36564e07d3124bf891e96194a093bc836f3058d2", size = 12860416, upload-time = "2026-03-13T17:56:02.456Z" },
{ url = "https://files.pythonhosted.org/packages/88/06/d82bfccbf5a1f43256dbc4d1984e398035a65f84f7c1e48b69ba15ea7281/zensical-0.0.27-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:847c881209e65e1db1291c59a9db77966ac50f7c66bf9a733c3c7832144dbfca", size = 12819533, upload-time = "2026-03-13T17:56:05.487Z" },
{ url = "https://files.pythonhosted.org/packages/4d/1f/d25e421d91f063a9404c59dd032f65a67c7c700e9f5f40436ab98e533482/zensical-0.0.27-cp310-abi3-win32.whl", hash = "sha256:f31ec13c700794be3f9c0b7d90f09a7d23575a3a27c464994b9bb441a22d880b", size = 11862822, upload-time = "2026-03-13T17:56:08.933Z" },
{ url = "https://files.pythonhosted.org/packages/5a/b5/5b86d126fcc42b96c5dbecde5074d6ea766a1a884e3b25b3524843c5e6a5/zensical-0.0.27-cp310-abi3-win_amd64.whl", hash = "sha256:9d3b1fca7ea99a7b2a8db272dd7f7839587c4ebf4f56b84ff01c97b3893ec9f8", size = 12059658, upload-time = "2026-03-13T17:56:11.859Z" },
{ url = "https://files.pythonhosted.org/packages/c2/db/cc4e555d2e816f2d91304ff969d62cc3a401ee477dbb7c720b874bec67d6/zensical-0.0.31-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b489936d670733dd204f16b689a2acc0e45b69e42cc4901f5131ae57658b8fbc", size = 12419980, upload-time = "2026-04-01T11:29:44.01Z" },
{ url = "https://files.pythonhosted.org/packages/e7/c1/6789f73164c7f5821f5defb8a80b1dba8d5af24bdec7db36876793c5afd9/zensical-0.0.31-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:d9f678efc0d9918e45eeb8bc62847b2cce23db7393c8c59c1be6d1c064bbaacd", size = 12292301, upload-time = "2026-04-01T11:29:47.277Z" },
{ url = "https://files.pythonhosted.org/packages/4f/9a/6a83ad209081a953e0285d5056e5452c4fbcabd2f104f3797d53e4bdd96f/zensical-0.0.31-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb2b50ecf674997f818e53f12f2a67875a21b0c79ed74c151dfaef2f1475e5bf", size = 12661472, upload-time = "2026-04-01T11:29:50.706Z" },
{ url = "https://files.pythonhosted.org/packages/9c/4a/a82f5c81893b7a607cf9d439b75c3c3894b4ef4d3e92d5d818b4fa5c6f23/zensical-0.0.31-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6fb5c634fe88254770a2d4db5c05b06f1c3ee5e29d2ae3e7efdae8905e435b1d", size = 12603784, upload-time = "2026-04-01T11:29:53.623Z" },
{ url = "https://files.pythonhosted.org/packages/f7/1c/79c198628b8e006be32dfb1c5b73561757a349a6cf3069600a67ffa62495/zensical-0.0.31-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e64630552793274db1ec66c971e49a15ad351536d5d12de67ec6da7358ac50", size = 12959832, upload-time = "2026-04-01T11:29:56.736Z" },
{ url = "https://files.pythonhosted.org/packages/db/9d/45839d9ca0f69622e8a3b944f2d8d7f7d2b7c2da78201079c4feb275feb6/zensical-0.0.31-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:738a2fd5832e3b3c10ff642eebaf89c89ca1d28e4451dad0f36fdac53c415577", size = 12704024, upload-time = "2026-04-01T11:29:59.836Z" },
{ url = "https://files.pythonhosted.org/packages/df/5f/451d7f4d94092bc38bd8d514826fb7b0329c188db506795b1d20bd07d517/zensical-0.0.31-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:bd601f6132e285ef6c3e4c3852be2094fc0473295a8080003db76a79760f84fb", size = 12837788, upload-time = "2026-04-01T11:30:03.048Z" },
{ url = "https://files.pythonhosted.org/packages/d8/39/390a8fc384fb174ebd4450343a0aa2877b3a31ddcedf5ef0b8d26944e12c/zensical-0.0.31-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:dc3b6a9dfb5903c0aa779ef65cd6185add2b8aa1db237be840874b8c9db761b8", size = 12876822, upload-time = "2026-04-01T11:30:06.418Z" },
{ url = "https://files.pythonhosted.org/packages/d5/60/640da2f095782cf38974cd851fb7afa62651d09a36543a1d8942b31aabdc/zensical-0.0.31-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:ddd4321b275e82c4897aa45b05038ce204b88fb311ad55f8c2af572173a9b56c", size = 13024036, upload-time = "2026-04-01T11:30:09.501Z" },
{ url = "https://files.pythonhosted.org/packages/3f/06/0564377cbfccea3653254adfa851c1b20d1696e4b16770c7b2e1dd1ef1d7/zensical-0.0.31-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:147ab4bc17f3088f703aa6c4b9c416411f4ea8ca64d26f6586beae49d97fd3c7", size = 12975505, upload-time = "2026-04-01T11:30:12.268Z" },
{ url = "https://files.pythonhosted.org/packages/35/4b/b8a0c4e5937cb05882dcce667798403e00897135080a69f92363e5e3ff9f/zensical-0.0.31-cp310-abi3-win32.whl", hash = "sha256:03fa11e629a308507693489541f43e751697784e94365e7435b02104aefd1c2c", size = 12011233, upload-time = "2026-04-01T11:30:15.496Z" },
{ url = "https://files.pythonhosted.org/packages/3e/99/0eacdb466d344c0c86596932201268517be42f3e0bb6c78b2b0cd84c55f6/zensical-0.0.31-cp310-abi3-win_amd64.whl", hash = "sha256:d6621d4bb46af4143560045d4a18c8c76302db56bf1dbb6e2ce107d7fb643e09", size = 12207545, upload-time = "2026-04-01T11:30:19.054Z" },
]
@@ -2,10 +2,15 @@
site_name = "FastAPI Toolsets"
site_description = "Production-ready utilities for FastAPI applications."
site_author = "d3vyce"
site_url = "https://fastapi-toolsets.d3vyce.fr"
site_url = "https://fastapi-toolsets.d3vyce.fr/"
copyright = "Copyright © 2026 d3vyce"
repo_url = "https://github.com/d3vyce/fastapi-toolsets"
[project.extra.version]
provider = "mike"
default = "stable"
alias = true
[project.theme]
custom_dir = "docs/overrides"
language = "en"
@@ -140,6 +145,7 @@ Examples = [
[[project.nav]]
Migration = [
{"v3.0" = "migration/v3.md"},
{"v2.0" = "migration/v2.md"},
]